Add files using upload-large-folder tool
- config.json +85 -0
 - configuration_llada.py +463 -0
 - generation_config.json +6 -0
 - latest +1 -0
 - model-00001-of-00004.safetensors +3 -0
 - model-00002-of-00004.safetensors +3 -0
 - model-00003-of-00004.safetensors +3 -0
 - model-00004-of-00004.safetensors +3 -0
 - model.safetensors.index.json +724 -0
 - scheduler.pt +3 -0
 - special_tokens_map.json +38 -0
 - tokenizer.json +0 -0
 - tokenizer_config.json +2184 -0
 - trainer_state.json +2309 -0
 - training_args.bin +3 -0
 - zero_to_fp32.py +760 -0
 
    	
        config.json
    ADDED
    
@@ -0,0 +1,85 @@
+{
+  "activation_type": "silu",
+  "add_faster_video": false,
+  "add_time_instruction": false,
+  "alibi": false,
+  "alibi_bias_max": 8.0,
+  "architectures": [
+    "LlavaLladaForMaskedDiffusion"
+  ],
+  "attention_dropout": 0.0,
+  "attention_layer_norm": false,
+  "attention_layer_norm_with_affine": true,
+  "auto_map": {
+    "AutoConfig": "configuration_llada.LLaDAConfig",
+    "AutoModel": "modeling_llada.LLaDAModelLM",
+    "AutoModelForCausalLM": "modeling_llada.LLaDAModelLM"
+  },
+  "bias_for_layer_norm": false,
+  "block_group_size": 1,
+  "block_type": "llama",
+  "d_model": 4096,
+  "embedding_dropout": 0.0,
+  "embedding_size": 126464,
+  "eos_token_id": 126081,
+  "faster_token_stride": 10,
+  "flash_attention": false,
+  "force_sample": false,
+  "image_aspect_ratio": "square",
+  "image_crop_resolution": null,
+  "image_grid_pinpoints": null,
+  "image_split_resolution": null,
+  "include_bias": false,
+  "include_qkv_bias": false,
+  "init_cutoff_factor": null,
+  "init_device": "meta",
+  "init_fn": "mitchell",
+  "init_std": 0.02,
+  "input_emb_norm": false,
+  "layer_norm_type": "rms",
+  "layer_norm_with_affine": true,
+  "mask_token_id": 126336,
+  "max_sequence_length": 4096,
+  "mlp_hidden_size": 12288,
+  "mlp_ratio": 4,
+  "mm_hidden_size": 1152,
+  "mm_newline_position": "grid",
+  "mm_patch_merge_type": "spatial_unpad",
+  "mm_pooler_ratio": 2,
+  "mm_projector_lr": null,
+  "mm_projector_type": "mlp2x_gelu",
+  "mm_spatial_pool_mode": "bilinear",
+  "mm_spatial_pool_stride": null,
+  "mm_tunable_parts": "mm_vision_tower,mm_mlp_adapter,mm_language_model",
+  "mm_use_im_patch_token": false,
+  "mm_use_im_start_end": false,
+  "mm_vision_select_feature": "patch",
+  "mm_vision_select_layer": -2,
+  "mm_vision_tower": "/data/siglip-so400m-patch14-384",
+  "mm_vision_tower_lr": 2e-06,
+  "model_type": "llada",
+  "multi_query_attention": null,
+  "n_heads": 32,
+  "n_kv_heads": 32,
+  "n_layers": 32,
+  "pad_token_id": 126081,
+  "pos_skipping_range": 4096,
+  "precision": "amp_bf16",
+  "resampler_type": null,
+  "residual_dropout": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope": true,
+  "rope_full_precision": true,
+  "rope_theta": 500000.0,
+  "scale_logits": false,
+  "tokenizer_model_max_length": 2048,
+  "tokenizer_padding_side": "right",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.50.3",
+  "use_cache": false,
+  "use_mm_proj": true,
+  "use_pos_skipping": false,
+  "vision_tower_pretrained": null,
+  "vocab_size": 126464,
+  "weight_tying": false
+}
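
The config above wires the checkpoint to its custom code through auto_map: AutoConfig resolves to configuration_llada.LLaDAConfig and AutoModel/AutoModelForCausalLM to modeling_llada.LLaDAModelLM (the modeling file is not part of this commit). A minimal loading sketch under those assumptions; the local path is a placeholder, and trust_remote_code is required so transformers will import the bundled modules:

# Hedged sketch: load this checkpoint via the auto_map entries in config.json.
# "path/to/this/repo" is a placeholder for a local clone of the repository.
import torch
from transformers import AutoConfig, AutoModel

config = AutoConfig.from_pretrained("path/to/this/repo", trust_remote_code=True)
print(config.model_type, config.d_model, config.n_layers, config.mask_token_id)  # llada 4096 32 126336

model = AutoModel.from_pretrained(
    "path/to/this/repo",
    trust_remote_code=True,      # needed to import configuration_llada / modeling_llada
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
)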
    	
        configuration_llada.py
    ADDED
    
@@ -0,0 +1,463 @@
| 1 | 
         
            +
            """
         
     | 
| 2 | 
         
            +
            LLaDA configuration
         
     | 
| 3 | 
         
            +
            """
         
     | 
| 4 | 
         
            +
            from transformers import AutoConfig, PretrainedConfig
         
     | 
| 5 | 
         
            +
             
     | 
| 6 | 
         
            +
            from enum import Enum
         
     | 
| 7 | 
         
            +
            from os import PathLike
         
     | 
| 8 | 
         
            +
            from typing import Union
         
     | 
| 9 | 
         
            +
            from dataclasses import asdict, dataclass, field
         
     | 
| 10 | 
         
            +
            from glob import glob
         
     | 
| 11 | 
         
            +
            from pathlib import Path
         
     | 
| 12 | 
         
            +
            from typing import (
         
     | 
| 13 | 
         
            +
                Any,
         
     | 
| 14 | 
         
            +
                Dict,
         
     | 
| 15 | 
         
            +
                Iterable,
         
     | 
| 16 | 
         
            +
                List,
         
     | 
| 17 | 
         
            +
                Optional,
         
     | 
| 18 | 
         
            +
                Tuple,
         
     | 
| 19 | 
         
            +
                Type,
         
     | 
| 20 | 
         
            +
                TypeVar,
         
     | 
| 21 | 
         
            +
                Union,
         
     | 
| 22 | 
         
            +
                cast,
         
     | 
| 23 | 
         
            +
            )
         
     | 
| 24 | 
         
            +
             
     | 
| 25 | 
         
            +
             
     | 
| 26 | 
         
            +
            __all__ = [
         
     | 
| 27 | 
         
            +
                "ActivationType",
         
     | 
| 28 | 
         
            +
                "ActivationCheckpointingStrategy",
         
     | 
| 29 | 
         
            +
                "BlockType",
         
     | 
| 30 | 
         
            +
                "LayerNormType",
         
     | 
| 31 | 
         
            +
                "InitFnType",
         
     | 
| 32 | 
         
            +
                "ModelConfig",
         
     | 
| 33 | 
         
            +
            ]
         
     | 
| 34 | 
         
            +
             
     | 
| 35 | 
         
            +
            PathOrStr = Union[str, PathLike]
         
     | 
| 36 | 
         
            +
             
     | 
| 37 | 
         
            +
             
     | 
| 38 | 
         
            +
            class StrEnum(str, Enum):
         
     | 
| 39 | 
         
            +
                """
         
     | 
| 40 | 
         
            +
                This is equivalent to Python's :class:`enum.StrEnum` since version 3.11.
         
     | 
| 41 | 
         
            +
                We include this here for compatibility with older version of Python.
         
     | 
| 42 | 
         
            +
                """
         
     | 
| 43 | 
         
            +
             
     | 
| 44 | 
         
            +
                def __str__(self) -> str:
         
     | 
| 45 | 
         
            +
                    return self.value
         
     | 
| 46 | 
         
            +
             
     | 
| 47 | 
         
            +
                def __repr__(self) -> str:
         
     | 
| 48 | 
         
            +
                    return f"'{str(self)}'"
         
     | 
| 49 | 
         
            +
             
     | 
| 50 | 
         
            +
             
     | 
| 51 | 
         
            +
            class LayerNormType(StrEnum):
         
     | 
| 52 | 
         
            +
                default = "default"
         
     | 
| 53 | 
         
            +
                """
         
     | 
| 54 | 
         
            +
                The default LayerNorm implementation, equivalent to PyTorch's built-in version.
         
     | 
| 55 | 
         
            +
                """
         
     | 
| 56 | 
         
            +
             
     | 
| 57 | 
         
            +
                low_precision = "low_precision"
         
     | 
| 58 | 
         
            +
                """
         
     | 
| 59 | 
         
            +
                A low-precision version of the default LayerNorm.
         
     | 
| 60 | 
         
            +
                """
         
     | 
| 61 | 
         
            +
             
     | 
| 62 | 
         
            +
                rms = "rms"
         
     | 
| 63 | 
         
            +
                """
         
     | 
| 64 | 
         
            +
                An RMSNorm implementation. When using ``torch.compile`` this is
         
     | 
| 65 | 
         
            +
                probably the fastest implementation.
         
     | 
| 66 | 
         
            +
                """
         
     | 
| 67 | 
         
            +
             
     | 
| 68 | 
         
            +
                gemma_rms = "gemma_rms"
         
     | 
| 69 | 
         
            +
                """
         
     | 
| 70 | 
         
            +
                An RMSNorm implementation by gemmma. When using ``torch.compile`` this is
         
     | 
| 71 | 
         
            +
                probably the fastest implementation.
         
     | 
| 72 | 
         
            +
                """
         
     | 
| 73 | 
         
            +
             
     | 
| 74 | 
         
            +
                amd_compatible = "amd_compatible"
         
     | 
| 75 | 
         
            +
                """
         
     | 
| 76 | 
         
            +
                LayerNorm implemented manually to work around an issue with ROCm.
         
     | 
| 77 | 
         
            +
                """
         
     | 
| 78 | 
         
            +
             
     | 
| 79 | 
         
            +
             
     | 
| 80 | 
         
            +
            class ActivationType(StrEnum):
         
     | 
| 81 | 
         
            +
                gelu = "gelu"
         
     | 
| 82 | 
         
            +
                relu = "relu"
         
     | 
| 83 | 
         
            +
                silu = "silu"
         
     | 
| 84 | 
         
            +
                swiglu = "swiglu"
         
     | 
| 85 | 
         
            +
             
     | 
| 86 | 
         
            +
             
     | 
| 87 | 
         
            +
            class BlockType(StrEnum):
         
     | 
| 88 | 
         
            +
                sequential = "sequential"
         
     | 
| 89 | 
         
            +
                parallel = "parallel"
         
     | 
| 90 | 
         
            +
             
     | 
| 91 | 
         
            +
                llama = "llama"
         
     | 
| 92 | 
         
            +
                """
         
     | 
| 93 | 
         
            +
                A block similar to the sequential block with slightly different
         
     | 
| 94 | 
         
            +
                implementations of operations like attention to imitate the behavior of Llama.
         
     | 
| 95 | 
         
            +
                """
         
     | 
| 96 | 
         
            +
             
     | 
| 97 | 
         
            +
             
     | 
| 98 | 
         
            +
            class InitFnType(StrEnum):
         
     | 
| 99 | 
         
            +
                mitchell = "mitchell"
         
     | 
| 100 | 
         
            +
                """
         
     | 
| 101 | 
         
            +
                The strategy suggested to us by Mitchell Wortsman from UW.
         
     | 
| 102 | 
         
            +
                This uses a truncated normal distribution with an adaptive standard deviation that depends
         
     | 
| 103 | 
         
            +
                on the size of the weights as well as the depth of the layer.
         
     | 
| 104 | 
         
            +
                """
         
     | 
| 105 | 
         
            +
             
     | 
| 106 | 
         
            +
                normal = "normal"
         
     | 
| 107 | 
         
            +
                """
         
     | 
| 108 | 
         
            +
                All weights are initialized from the same normal distribution.
         
     | 
| 109 | 
         
            +
                """
         
     | 
| 110 | 
         
            +
             
     | 
| 111 | 
         
            +
                kaiming_normal = "kaiming_normal"
         
     | 
| 112 | 
         
            +
                """
         
     | 
| 113 | 
         
            +
                All weights are initialized with the Kaiming method from a normal distribution.
         
     | 
| 114 | 
         
            +
                Note this currently won't work with FSDP.
         
     | 
| 115 | 
         
            +
                """
         
     | 
| 116 | 
         
            +
             
     | 
| 117 | 
         
            +
                fan_in = "fan_in"
         
     | 
| 118 | 
         
            +
                """
         
     | 
| 119 | 
         
            +
                "Fan-in variance scaling", i.e. normal with a standard deviation of ``1/sqrt(d_in)`` where ``d_in``
         
     | 
| 120 | 
         
            +
                is the input dimensionality of the kernel.
         
     | 
| 121 | 
         
            +
                """
         
     | 
| 122 | 
         
            +
             
     | 
| 123 | 
         
            +
                full_megatron = "full_megatron"
         
     | 
| 124 | 
         
            +
                """
         
     | 
| 125 | 
         
            +
                This is what metaseq calls "full megatron init". It is the init used for Llama 2.
         
     | 
| 126 | 
         
            +
                """
         
     | 
| 127 | 
         
            +
             
     | 
| 128 | 
         
            +
             
     | 
| 129 | 
         
            +
            @dataclass
         
     | 
| 130 | 
         
            +
            class ModelConfig():
         
     | 
| 131 | 
         
            +
                """
         
     | 
| 132 | 
         
            +
                LLaDA (model) configuration.
         
     | 
| 133 | 
         
            +
                """
         
     | 
| 134 | 
         
            +
             
     | 
| 135 | 
         
            +
                # Note that the defaults for these attributes are equivalent to the base GPT2 model.
         
     | 
| 136 | 
         
            +
             
     | 
| 137 | 
         
            +
                d_model: int = 768
         
     | 
| 138 | 
         
            +
                """
         
     | 
| 139 | 
         
            +
                The hidden size of the model.
         
     | 
| 140 | 
         
            +
                """
         
     | 
| 141 | 
         
            +
             
     | 
| 142 | 
         
            +
                n_heads: int = 12
         
     | 
| 143 | 
         
            +
                """
         
     | 
| 144 | 
         
            +
                The number of self-attention heads.
         
     | 
| 145 | 
         
            +
                """
         
     | 
| 146 | 
         
            +
             
     | 
| 147 | 
         
            +
                n_kv_heads: Optional[int] = None
         
     | 
| 148 | 
         
            +
                """
         
     | 
| 149 | 
         
            +
                The number of heads to use for keys and values. Defaults to `n_heads`.
         
     | 
| 150 | 
         
            +
                Set this to ``None`` or ``n_heads`` for normal multi-head attention.
         
     | 
| 151 | 
         
            +
                Set this to 1 for multi-query attention.
         
     | 
| 152 | 
         
            +
                Set it to some in-between value for Llama2-style grouped query attention.
         
     | 
| 153 | 
         
            +
                """
         
     | 
| 154 | 
         
            +
             
     | 
| 155 | 
         
            +
                n_layers: int = 12
         
     | 
| 156 | 
         
            +
                """
         
     | 
| 157 | 
         
            +
                The number of layers/blocks.
         
     | 
| 158 | 
         
            +
                """
         
     | 
| 159 | 
         
            +
             
     | 
| 160 | 
         
            +
                mlp_ratio: int = 4
         
     | 
| 161 | 
         
            +
                """
         
     | 
| 162 | 
         
            +
                The ratio of the inner MLP dimensionality to ``d_model``.
         
     | 
| 163 | 
         
            +
                This is only used when ``mlp_hidden_size`` is not set.
         
     | 
| 164 | 
         
            +
                """
         
     | 
| 165 | 
         
            +
             
     | 
| 166 | 
         
            +
                mlp_hidden_size: Optional[int] = None
         
     | 
| 167 | 
         
            +
                """
         
     | 
| 168 | 
         
            +
                Set the exact hidden size for the MLP. Otherwise the inner MLP hidden size will be set to `mlp_ratio * d_model`.
         
     | 
| 169 | 
         
            +
                """
         
     | 
| 170 | 
         
            +
             
     | 
| 171 | 
         
            +
                activation_type: ActivationType = ActivationType.swiglu
         
     | 
| 172 | 
         
            +
                """
         
     | 
| 173 | 
         
            +
                The activation function to use within the MLP layers.
         
     | 
| 174 | 
         
            +
                """
         
     | 
| 175 | 
         
            +
             
     | 
| 176 | 
         
            +
                block_type: BlockType = BlockType.sequential
         
     | 
| 177 | 
         
            +
                """
         
     | 
| 178 | 
         
            +
                The transformer block implementation.
         
     | 
| 179 | 
         
            +
                """
         
     | 
| 180 | 
         
            +
             
     | 
| 181 | 
         
            +
                block_group_size: int = 1
         
     | 
| 182 | 
         
            +
                """
         
     | 
| 183 | 
         
            +
                The number of blocks to group together into a single parent block.
         
     | 
| 184 | 
         
            +
                This has no affect on the number of parameters in the model and is only used to wrap groups
         
     | 
| 185 | 
         
            +
                of blocks together with a single FSDP wrapper during training.
         
     | 
| 186 | 
         
            +
                """
         
     | 
| 187 | 
         
            +
             
     | 
| 188 | 
         
            +
                alibi: bool = False
         
     | 
| 189 | 
         
            +
                """
         
     | 
| 190 | 
         
            +
                If ``True``, use ALiBi embeddings. Mutually exclusive with ``rope``.
         
     | 
| 191 | 
         
            +
                """
         
     | 
| 192 | 
         
            +
             
     | 
| 193 | 
         
            +
                alibi_bias_max: float = 8.0
         
     | 
| 194 | 
         
            +
                """
         
     | 
| 195 | 
         
            +
                Maximum absolute value of ALiBi bias.
         
     | 
| 196 | 
         
            +
                """
         
     | 
| 197 | 
         
            +
             
     | 
| 198 | 
         
            +
                rope: bool = False
         
     | 
| 199 | 
         
            +
                """
         
     | 
| 200 | 
         
            +
                Use rotary positional embeddings (RoPE). Mutually exclusive with ``alibi``.
         
     | 
| 201 | 
         
            +
                """
         
     | 
| 202 | 
         
            +
             
     | 
| 203 | 
         
            +
                rope_full_precision: bool = True
         
     | 
| 204 | 
         
            +
                """
         
     | 
| 205 | 
         
            +
                If ``True``, apply RoPE embeddings at full precision regardless of the input type. Otherwise,
         
     | 
| 206 | 
         
            +
                apply RoPE at the precision of the input.
         
     | 
| 207 | 
         
            +
                """
         
     | 
| 208 | 
         
            +
             
     | 
| 209 | 
         
            +
                flash_attention: bool = False
         
     | 
| 210 | 
         
            +
                """
         
     | 
| 211 | 
         
            +
                If ``True``, use ``FlashAttention``.
         
     | 
| 212 | 
         
            +
                """
         
     | 
| 213 | 
         
            +
             
     | 
| 214 | 
         
            +
                attention_dropout: float = 0.1
         
     | 
| 215 | 
         
            +
                """
         
     | 
| 216 | 
         
            +
                The dropout probability within the attention modules.
         
     | 
| 217 | 
         
            +
                """
         
     | 
| 218 | 
         
            +
             
     | 
| 219 | 
         
            +
                multi_query_attention: Optional[bool] = None
         
     | 
| 220 | 
         
            +
                """
         
     | 
| 221 | 
         
            +
                Use the Multi-Query formulation of attention used in PaLM. This reduces the number of parameters
         
     | 
| 222 | 
         
            +
                and is more efficient during inference.
         
     | 
| 223 | 
         
            +
                """
         
     | 
| 224 | 
         
            +
             
     | 
| 225 | 
         
            +
                attention_layer_norm: bool = False
         
     | 
| 226 | 
         
            +
                """
         
     | 
| 227 | 
         
            +
                Apply layer norm to the keys and queries within the attention mechanism.
         
     | 
| 228 | 
         
            +
                This can help stabilize training.
         
     | 
| 229 | 
         
            +
                """
         
     | 
| 230 | 
         
            +
             
     | 
| 231 | 
         
            +
                residual_dropout: float = 0.1
         
     | 
| 232 | 
         
            +
                """
         
     | 
| 233 | 
         
            +
                The dropout probability for the MLP and attention output within each block.
         
     | 
| 234 | 
         
            +
                """
         
     | 
| 235 | 
         
            +
             
     | 
| 236 | 
         
            +
                embedding_dropout: float = 0.1
         
     | 
| 237 | 
         
            +
                """
         
     | 
| 238 | 
         
            +
                The dropout probability for embeddings.
         
     | 
| 239 | 
         
            +
                """
         
     | 
| 240 | 
         
            +
             
     | 
| 241 | 
         
            +
                input_emb_norm: bool = False
         
     | 
| 242 | 
         
            +
                """
         
     | 
| 243 | 
         
            +
                An input hidden_states norm implementation by gemmma.
         
     | 
| 244 | 
         
            +
                """
         
     | 
| 245 | 
         
            +
             
     | 
| 246 | 
         
            +
                layer_norm_type: LayerNormType = LayerNormType.default
         
     | 
| 247 | 
         
            +
                """
         
     | 
| 248 | 
         
            +
                The layernorm implementation to use.
         
     | 
| 249 | 
         
            +
                """
         
     | 
| 250 | 
         
            +
             
     | 
| 251 | 
         
            +
                layer_norm_with_affine: bool = True
         
     | 
| 252 | 
         
            +
                """
         
     | 
| 253 | 
         
            +
                Whether to include bias and weight parameters for the layer norms.
         
     | 
| 254 | 
         
            +
                This only affects layer norms that are immediately followed by a linear layer in the forward pass,
         
     | 
| 255 | 
         
            +
                so everything except QK-norms. To turn off affines for QK norms as well, set :attr:`attention_layer_norm_with_affine`
         
     | 
| 256 | 
         
            +
                to ``False``.
         
     | 
| 257 | 
         
            +
                """
         
     | 
| 258 | 
         
            +
             
     | 
| 259 | 
         
            +
                rms_norm_eps: float = 1e-05
         
     | 
| 260 | 
         
            +
                """
         
     | 
| 261 | 
         
            +
                The rms layernorm eps param.
         
     | 
| 262 | 
         
            +
                """
         
     | 
| 263 | 
         
            +
             
     | 
| 264 | 
         
            +
                attention_layer_norm_with_affine: bool = True
         
     | 
| 265 | 
         
            +
                """
         
     | 
| 266 | 
         
            +
                Toggle affine transform for the QK norms.
         
     | 
| 267 | 
         
            +
                """
         
     | 
| 268 | 
         
            +
             
     | 
| 269 | 
         
            +
                max_sequence_length: int = 1024
         
     | 
| 270 | 
         
            +
                """
         
     | 
| 271 | 
         
            +
                The maximum input sequence length supported by the model.
         
     | 
| 272 | 
         
            +
                """
         
     | 
| 273 | 
         
            +
             
     | 
| 274 | 
         
            +
                rope_theta: float = 10000.0
         
     | 
| 275 | 
         
            +
                """
         
     | 
| 276 | 
         
            +
                The rope base param.
         
     | 
| 277 | 
         
            +
                """
         
     | 
| 278 | 
         
            +
             
     | 
| 279 | 
         
            +
                include_qkv_bias: Optional[bool] = False
         
     | 
| 280 | 
         
            +
                """
         
     | 
| 281 | 
         
            +
                Whether or not to include bias parameters in qkv linear layers.
         
     | 
| 282 | 
         
            +
                """
         
     | 
| 283 | 
         
            +
             
     | 
| 284 | 
         
            +
                include_bias: bool = False
         
     | 
| 285 | 
         
            +
                """
         
     | 
| 286 | 
         
            +
                Whether or not to include bias parameters in linear layers.
         
     | 
| 287 | 
         
            +
                In PaLM, they got rid of all bias terms because they found that large
         
     | 
| 288 | 
         
            +
                models tend to have near 0 bias terms anyway.
         
     | 
| 289 | 
         
            +
                """
         
     | 
| 290 | 
         
            +
             
     | 
| 291 | 
         
            +
                bias_for_layer_norm: Optional[bool] = None
         
     | 
| 292 | 
         
            +
                """
         
     | 
| 293 | 
         
            +
                Whether or not to include bias parameters in layer norm.
         
     | 
| 294 | 
         
            +
                This is separate from the include_bias parameter, because of a ROCm crash when biases are disabled in
         
     | 
| 295 | 
         
            +
                layer norm.
         
     | 
| 296 | 
         
            +
                When this is None (the default), it inherits the setting from include_bias.
         
     | 
| 297 | 
         
            +
                """
         
     | 
| 298 | 
         
            +
             
     | 
| 299 | 
         
            +
                scale_logits: bool = False
         
     | 
| 300 | 
         
            +
                """
         
     | 
| 301 | 
         
            +
                If ``True``, scale the output logits by ``1 / sqrt(d_model)``.
         
     | 
| 302 | 
         
            +
                """
         
     | 
| 303 | 
         
            +
             
     | 
| 304 | 
         
            +
                vocab_size: int = 50257
         
     | 
| 305 | 
         
            +
                """
         
     | 
| 306 | 
         
            +
                Vocabulary size of the model.
         
     | 
| 307 | 
         
            +
                """
         
     | 
| 308 | 
         
            +
             
     | 
| 309 | 
         
            +
                embedding_size: Optional[int] = 50304
         
     | 
| 310 | 
         
            +
                """
         
     | 
| 311 | 
         
            +
                The number of embeddings, i.e. the number of tokens. If set to ``None`` it will default
         
     | 
| 312 | 
         
            +
                to ``vocab_size``. If ``vocab_size`` is not a multiple of 128, setting this to the
         
     | 
| 313 | 
         
            +
                next multiple of 128 that's greater than ``vocab_size`` can improve throughput
         
     | 
| 314 | 
         
            +
                substantially.
         
     | 
| 315 | 
         
            +
                """
         
     | 
| 316 | 
         
            +
             
     | 
| 317 | 
         
            +
                weight_tying: bool = True
         
     | 
| 318 | 
         
            +
                """
         
     | 
| 319 | 
         
            +
                Whether to tie output linear weights to the input embedding.
         
     | 
| 320 | 
         
            +
                """
         
     | 
| 321 | 
         
            +
             
     | 
| 322 | 
         
            +
                eos_token_id: int = 50256
         
     | 
| 323 | 
         
            +
                """
         
     | 
| 324 | 
         
            +
                The ID of the end-of-sentence special token.
         
     | 
| 325 | 
         
            +
                """
         
     | 
| 326 | 
         
            +
             
     | 
| 327 | 
         
            +
                pad_token_id: int = 50256
         
     | 
| 328 | 
         
            +
                """
         
     | 
| 329 | 
         
            +
                The ID of the token to use for padding. Defaults to the ID of the EOS token.
         
     | 
| 330 | 
         
            +
                """
         
     | 
| 331 | 
         
            +
             
     | 
| 332 | 
         
            +
                mask_token_id: Optional[int] = 50256
         
     | 
| 333 | 
         
            +
                """
         
     | 
| 334 | 
         
            +
                The ID of the token to use for mask token. Defaults to the ID of the EOS token.
         
     | 
| 335 | 
         
            +
                """
         
     | 
| 336 | 
         
            +
             
     | 
| 337 | 
         
            +
                init_device: Optional[str] = None
         
     | 
| 338 | 
         
            +
                """
         
     | 
| 339 | 
         
            +
                The torch device to use when initializing the model parameters, e.g. "cpu", "cuda:0", "meta".
         
     | 
| 340 | 
         
            +
                """
         
     | 
| 341 | 
         
            +
             
     | 
| 342 | 
         
            +
                init_fn: InitFnType = InitFnType.normal
         
     | 
| 343 | 
         
            +
                """
         
     | 
| 344 | 
         
            +
                The weight initialization strategy.
         
     | 
| 345 | 
         
            +
                """
         
     | 
| 346 | 
         
            +
             
     | 
| 347 | 
         
            +
                init_std: float = 0.02
         
     | 
| 348 | 
         
            +
                """
         
     | 
| 349 | 
         
            +
                The standard deviation to use when initializing weights with a "fixed distribution" ``init_fn``, such
         
     | 
| 350 | 
         
            +
                as "normal".
         
     | 
| 351 | 
         
            +
                """
         
     | 
| 352 | 
         
            +
             
     | 
| 353 | 
         
            +
                init_cutoff_factor: Optional[float] = None
         
     | 
| 354 | 
         
            +
                """
         
     | 
| 355 | 
         
            +
                A positive factor used to scale the cutoff values when initializing weights with a "fixed distribution" ``init_fn``, such
         
     | 
| 356 | 
         
            +
                as "normal". Setting this to None means values are not cutoff.
         
     | 
| 357 | 
         
            +
                """
         
     | 
| 358 | 
         
            +
             
     | 
| 359 | 
         
            +
                precision: Optional[str] = None
         
     | 
| 360 | 
         
            +
                """
         
     | 
| 361 | 
         
            +
                Precision used to train/evaluate with. You shouldn't set this directly.
         
     | 
| 362 | 
         
            +
                See :data:`TrainConfig.precision` instead.
         
     | 
| 363 | 
         
            +
                """
         
     | 
| 364 | 
         
            +
             
     | 
| 365 | 
         
            +
                @property
         
     | 
| 366 | 
         
            +
                def effective_n_kv_heads(self) -> int:
         
     | 
| 367 | 
         
            +
                    if self.n_kv_heads is None:
         
     | 
| 368 | 
         
            +
                        if self.multi_query_attention is True:
         
     | 
| 369 | 
         
            +
                            return 1
         
     | 
| 370 | 
         
            +
                        else:
         
     | 
| 371 | 
         
            +
                            return self.n_heads
         
     | 
| 372 | 
         
            +
                    else:
         
     | 
| 373 | 
         
            +
                        if self.multi_query_attention is None:
         
     | 
| 374 | 
         
            +
                            return self.n_kv_heads
         
     | 
| 375 | 
         
            +
                        if self.multi_query_attention:
         
     | 
| 376 | 
         
            +
                            n_kv_heads_should_be = 1
         
     | 
| 377 | 
         
            +
                        else:
         
     | 
| 378 | 
         
            +
                            n_kv_heads_should_be = self.n_heads
         
     | 
| 379 | 
         
            +
                        if self.n_kv_heads == n_kv_heads_should_be:
         
     | 
| 380 | 
         
            +
                            return n_kv_heads_should_be
         
     | 
| 381 | 
         
            +
                        else:
         
     | 
| 382 | 
         
            +
                            raise Exception(
         
     | 
| 383 | 
         
            +
                                "You can't set `multi_query_attention` and `n_kv_heads` at the same time."
         
     | 
| 384 | 
         
            +
                            )
         
     | 
| 385 | 
         
            +
             
     | 
| 386 | 
         
            +
            class ActivationCheckpointingStrategy(StrEnum):
         
     | 
| 387 | 
         
            +
                whole_layer = "whole_layer"
         
     | 
| 388 | 
         
            +
                """
         
     | 
| 389 | 
         
            +
                Checkpoint every transformer layer.
         
     | 
| 390 | 
         
            +
                """
         
     | 
| 391 | 
         
            +
             
     | 
| 392 | 
         
            +
                one_in_two = "one_in_two"
         
     | 
| 393 | 
         
            +
                """
         
     | 
| 394 | 
         
            +
                Checkpoint one in two transformer layers.
         
     | 
| 395 | 
         
            +
                """
         
     | 
| 396 | 
         
            +
             
     | 
| 397 | 
         
            +
                one_in_three = "one_in_three"
         
     | 
| 398 | 
         
            +
                """
         
     | 
| 399 | 
         
            +
                Checkpoint one in three transformer layers.
         
     | 
| 400 | 
         
            +
                """
         
     | 
| 401 | 
         
            +
             
     | 
| 402 | 
         
            +
                one_in_four = "one_in_four"
         
     | 
| 403 | 
         
            +
                """
         
     | 
| 404 | 
         
            +
                Checkpoint one in four transformer layers.
         
     | 
| 405 | 
         
            +
                """
         
     | 
| 406 | 
         
            +
                
         
     | 
| 407 | 
         
            +
                two_in_three = "two_in_three"
         
     | 
| 408 | 
         
            +
                """
         
     | 
| 409 | 
         
            +
    Checkpoint two out of every three transformer layers.
    """

    three_in_four = "three_in_four"
    """
    Checkpoint three out of every four transformer layers.
    """

    four_in_five = "four_in_five"
    """
    Checkpoint four out of every five transformer layers.
    """

    nine_in_ten = "nine_in_ten"
    """
    Checkpoint nine out of every ten transformer layers.
    """

    fine_grained = "fine_grained"
    """
    Focus checkpointing where recomputation is cheap and the memory savings are largest.
    """


class LLaDAConfig(PretrainedConfig):
    model_type = "llada"
    keys_to_ignore_at_inference = ["past_key_values"]  # TODO: confirm

    def __init__(self, use_cache: bool = False, **kwargs):
        model_config = ModelConfig()
        all_kwargs = model_config.__dict__
        all_kwargs.update(kwargs)
        all_kwargs.update({"use_cache": use_cache})
        all_kwargs.update(
            {"architectures": all_kwargs.get("architectures", ["LLaDAModelLM"])}
        )
        super().__init__(**all_kwargs)

    @property
    def num_attention_heads(self):
        return self.n_heads

    @property
    def num_hidden_layers(self):
        return self.n_layers

    @property
    def hidden_size(self):
        return self.d_model


# Register the config class so that it is available for transformers pipelines, auto-loading, etc.
AutoConfig.register("llada", LLaDAConfig)
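A minimal usage sketch (not part of the uploaded files; it assumes the repo has been downloaded locally and that transformers can resolve the registration above or the auto_map entry in config.json):

    from transformers import AutoConfig

    # Load the registered "llada" config from a local checkpoint directory
    # (hypothetical path; trust_remote_code lets transformers import the
    # configuration_llada.py shipped with this repo).
    config = AutoConfig.from_pretrained("./", trust_remote_code=True)
    print(config.model_type)         # "llada"
    print(config.num_hidden_layers)  # proxied to n_layers by the property above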
    	
generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 126080,
  "eos_token_id": 126081,
  "transformers_version": "4.50.3"
}
    	
latest
ADDED
@@ -0,0 +1 @@
global_step321
    	
model-00001-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6c4f50fa160f55a9193d227afc2c3b9a2e903e2a87325c84754db3bb18e1a1b3
size 4995589944
    	
model-00002-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7c3663d9900d79a708b0f9ce3554e86186aae472de271031f45c7f95e3dc8701
size 4999819552
    	
model-00003-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cabea9e9815b1c574328cd84ee375cbc77e61dc9239da4f78dc5e15be77bd5b5
size 4999802728
    	
model-00004-of-00004.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2f5e7f9011f0620d1fea7b106334d6210f54f957985dad2680a5a76aebe50428
size 1874563264
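A minimal sketch (not part of the uploaded files; it assumes the shards and the index file below have been downloaded into the current directory) of how the sharded-checkpoint index maps each weight name to its .safetensors shard:

    import json

    # The index lists the total checkpoint size and a weight_map from tensor
    # name to the shard file that stores it.
    with open("model.safetensors.index.json") as f:
        index = json.load(f)

    print(index["metadata"]["total_size"])  # 16869674048 bytes across the 4 shards
    print(index["weight_map"]["model.transformer.blocks.0.attn_out.weight"])
    # -> "model-00001-of-00004.safetensors"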
    	
model.safetensors.index.json
ADDED
@@ -0,0 +1,724 @@
{
  "metadata": {
    "total_size": 16869674048
  },
  "weight_map": {
    "model.image_newline": "model-00001-of-00004.safetensors",
    "model.mm_projector.0.bias": "model-00004-of-00004.safetensors",
    "model.mm_projector.0.weight": "model-00004-of-00004.safetensors",
    "model.mm_projector.2.bias": "model-00004-of-00004.safetensors",
    "model.mm_projector.2.weight": "model-00004-of-00004.safetensors",
    "model.transformer.blocks.0.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.0.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.1.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.10.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.10.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.11.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.12.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.13.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.14.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.15.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.16.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.17.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.18.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.19.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.2.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.2.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.20.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.20.attn_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.20.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.20.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.20.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.20.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.20.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.20.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.20.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.21.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.21.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.22.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.23.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.24.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.25.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.26.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.27.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.28.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.29.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.3.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.3.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.30.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.30.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.attn_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.attn_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.ff_norm.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.ff_out.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.ff_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.k_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.q_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.up_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.31.v_proj.weight": "model-00003-of-00004.safetensors",
    "model.transformer.blocks.4.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.4.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.5.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.6.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.7.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.attn_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.ff_norm.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.ff_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.ff_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.k_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.q_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.up_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.8.v_proj.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.9.attn_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.attn_out.weight": "model-00001-of-00004.safetensors",
    "model.transformer.blocks.9.ff_norm.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.ff_out.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.ff_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.k_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.q_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.up_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.blocks.9.v_proj.weight": "model-00002-of-00004.safetensors",
    "model.transformer.ff_out.weight": "model-00004-of-00004.safetensors",
    "model.transformer.ln_f.weight": "model-00001-of-00004.safetensors",
| 301 | 
         
            +
                "model.transformer.wte.weight": "model-00001-of-00004.safetensors",
         
     | 
| 302 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.bias": "model-00004-of-00004.safetensors",
         
     | 
| 303 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00004-of-00004.safetensors",
         
     | 
| 304 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00004-of-00004.safetensors",
         
     | 
| 305 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 306 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 307 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 308 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 309 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 310 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 311 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 312 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 313 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 314 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 315 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 316 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 317 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 318 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 319 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 320 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 321 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 322 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 323 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 324 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 325 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 326 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 327 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 328 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 329 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 330 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 331 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 332 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 333 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 334 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 335 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 336 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 337 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 338 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 339 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 340 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 341 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 342 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 343 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 344 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 345 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 346 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 347 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 348 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 349 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 350 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 351 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 352 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 353 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 354 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 355 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 356 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 357 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 358 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 359 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 360 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 361 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 362 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 363 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 364 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 365 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 366 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 367 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 368 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 369 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 370 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 371 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 372 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 373 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 374 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 375 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 376 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 377 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 378 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 379 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 380 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 381 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 382 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 383 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 384 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 385 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 386 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 387 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 388 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 389 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 390 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 391 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 392 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 393 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 394 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 395 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 396 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 397 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 398 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 399 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 400 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 401 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 402 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 403 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 404 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 405 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 406 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 407 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 408 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 409 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 410 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 411 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 412 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 413 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 414 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 415 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 416 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 417 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 418 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 419 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 420 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 421 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 422 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 423 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 424 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 425 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 426 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 427 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 428 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 429 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 430 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 431 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 432 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 433 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 434 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 435 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 436 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 437 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 438 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 439 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 440 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 441 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 442 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 443 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 444 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 445 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 446 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 447 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 448 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 449 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 450 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 451 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 452 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 453 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 454 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 455 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 456 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 457 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 458 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 459 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 460 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 461 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 462 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 463 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 464 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 465 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 466 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 467 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 468 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 469 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 470 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 471 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 472 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 473 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 474 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 475 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 476 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 477 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 478 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 479 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 480 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 481 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 482 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 483 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 484 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 485 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 486 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 487 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 488 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 489 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 490 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 491 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 492 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 493 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 494 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 495 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 496 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 497 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 498 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 499 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 500 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 501 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 502 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 503 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 504 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 505 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 506 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 507 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 508 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 509 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 510 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 511 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 512 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 513 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 514 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 515 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 516 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 517 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 518 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 519 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 520 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 521 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 522 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 523 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 524 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 525 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 526 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 527 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 528 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 529 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 530 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 531 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 532 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 533 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 534 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 535 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 536 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 537 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 538 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 539 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 540 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 541 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 542 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 543 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 544 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 545 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 546 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 547 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 548 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 549 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 550 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 551 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 552 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 553 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 554 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 555 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 556 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 557 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 558 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 559 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 560 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 561 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 562 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 563 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 564 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 565 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 566 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 567 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 568 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 569 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 570 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 571 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 572 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 573 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 574 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 575 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 576 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 577 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 578 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 579 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 580 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 581 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 582 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 583 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 584 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 585 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 586 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 587 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 588 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 589 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 590 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 591 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 592 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 593 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 594 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 595 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 596 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 597 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 598 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 599 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 600 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 601 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 602 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 603 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 604 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 605 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 606 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 607 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 608 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 609 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 610 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 611 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 612 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 613 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 614 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 615 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 616 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 617 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 618 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 619 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 620 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 621 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 622 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 623 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 624 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 625 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 626 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 627 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 628 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 629 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 630 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 631 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 632 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 633 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 634 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 635 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 636 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 637 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 638 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 639 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 640 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 641 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 642 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 643 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 644 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 645 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 646 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 647 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 648 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 649 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 650 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 651 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 652 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 653 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 654 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 655 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 656 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 657 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 658 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 659 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 660 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 661 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 662 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 663 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 664 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 665 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 666 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 667 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 668 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 669 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 670 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 671 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 672 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 673 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 674 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 675 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 676 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 677 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 678 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 679 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 680 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 681 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 682 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 683 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 684 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 685 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 686 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 687 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 688 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 689 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 690 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 691 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 692 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 693 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 694 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 695 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 696 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 697 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 698 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 699 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 700 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 701 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 702 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 703 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 704 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 705 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 706 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 707 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 708 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 709 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00004-of-00004.safetensors",
         
     | 
| 710 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00004-of-00004.safetensors",
         
     | 
| 711 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00004-of-00004.safetensors",
         
     | 
| 712 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00004-of-00004.safetensors",
         
     | 
| 713 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 714 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 715 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 716 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 717 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 718 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 719 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00004-of-00004.safetensors",
         
     | 
| 720 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00004-of-00004.safetensors",
         
     | 
| 721 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00004-of-00004.safetensors",
         
     | 
| 722 | 
         
            +
                "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00004-of-00004.safetensors"
         
     | 
| 723 | 
         
            +
              }
         
     | 
| 724 | 
         
            +
            }
         
     | 
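
The weight_map above is the tail of model.safetensors.index.json: every parameter name is mapped to the shard file that stores it, and all of the vision-tower weights land in model-00004-of-00004.safetensors. The sketch below is a minimal, illustrative way to resolve one tensor through such an index; it assumes the repository has been cloned locally (current directory) and that the `safetensors` package is installed.

```python
import json
from pathlib import Path

from safetensors import safe_open

# Assumption: we are inside a local clone of this repository.
ckpt_dir = Path(".")

# weight_map: {parameter name -> shard file that stores it}
index = json.loads((ckpt_dir / "model.safetensors.index.json").read_text())
weight_map = index["weight_map"]

name = "model.vision_tower.vision_tower.vision_model.post_layernorm.weight"
shard = weight_map[name]  # "model-00004-of-00004.safetensors"

# Open only the shard that holds this tensor and read it lazily.
with safe_open(str(ckpt_dir / shard), framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape), "->", shard)
```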
    	
scheduler.pt
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2b64c779616b4447b27dc70d231d96bd46384376b4aff094da11d9056000068
+size 1064
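
What is committed for scheduler.pt is not the tensor data itself but the Git LFS pointer shown above: the spec version, the SHA-256 of the real blob, and its size in bytes (1064 here). A small standard-library sketch of how such a pointer can be parsed and a downloaded blob checked against it; the local file paths are assumptions for illustration.

```python
import hashlib
from pathlib import Path


def parse_lfs_pointer(text: str) -> dict:
    """Turn the three 'key value' lines of a Git LFS pointer into a dict."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}


def matches_pointer(blob: Path, pointer: dict) -> bool:
    """True when the blob's size and SHA-256 agree with the pointer."""
    data = blob.read_bytes()
    return len(data) == pointer["size"] and hashlib.sha256(data).hexdigest() == pointer["oid"]


# Assumed layout: the pointer text saved locally, the blob fetched via `git lfs pull`.
ptr = parse_lfs_pointer(Path("scheduler.pt.pointer").read_text())
print(matches_pointer(Path("scheduler.pt"), ptr))
```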
    	
special_tokens_map.json
ADDED

@@ -0,0 +1,38 @@
+{
+  "additional_special_tokens": [
+    "<role>",
+    "</role>",
+    "<|arithmetic_start|>",
+    "<|arithmetic_end|>",
+    "<|number_start|>",
+    "<|number_end|>"
+  ],
+  "bos_token": {
+    "content": "<|startoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "cls_token": {
+    "content": "[CLS]",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<|endoftext|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  }
+}
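
special_tokens_map.json registers the role and arithmetic/number markers as additional special tokens, sets <|startoftext|> / <|endoftext|> as BOS / EOS, and reuses <|endoftext|> as the PAD token. A quick standard-library check of those facts against the file; the local path is an assumption.

```python
import json
from pathlib import Path

# Assumption: special_tokens_map.json is present in the current directory.
tokens = json.loads(Path("special_tokens_map.json").read_text())

print(tokens["additional_special_tokens"])
# ['<role>', '</role>', '<|arithmetic_start|>', '<|arithmetic_end|>', '<|number_start|>', '<|number_end|>']

# Padding reuses the EOS string, so PAD and EOS resolve to the same token.
assert tokens["pad_token"]["content"] == tokens["eos_token"]["content"] == "<|endoftext|>"
print(tokens["bos_token"]["content"], tokens["cls_token"]["content"])  # <|startoftext|> [CLS]
```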
    	
tokenizer.json
ADDED

The diff for this file is too large to render. See raw diff.
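
tokenizer.json (the full fast-tokenizer definition, too large to render here), tokenizer_config.json, and special_tokens_map.json are read together when the tokenizer is loaded. A minimal sketch, assuming the `transformers` library and a local clone of this repository; a hub repo id would work the same way.

```python
from transformers import AutoTokenizer

# Assumption: current directory is a local clone of this repository.
tok = AutoTokenizer.from_pretrained(".")

print(tok.bos_token, tok.eos_token, tok.pad_token)   # expected: <|startoftext|> <|endoftext|> <|endoftext|>
print(tok.additional_special_tokens[:2])             # expected: ['<role>', '</role>']

# Registered special tokens should survive tokenization as single tokens.
ids = tok("<role>user</role> hello", add_special_tokens=False).input_ids
print(ids[:6], tok.convert_ids_to_tokens(ids)[:6])
```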
    	
tokenizer_config.json
ADDED

@@ -0,0 +1,2184 @@
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
| 
         | 
|
+{
+  "add_bos_token": false,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "126080": {
+      "content": "<|startoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126081": {
+      "content": "<|endoftext|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126082": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126083": {
+      "content": "[gMASK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126084": {
+      "content": "<|reserved_token_0|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126085": {
+      "content": "<|reserved_token_1|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126086": {
+      "content": "<|reserved_token_2|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126087": {
+      "content": "<|reserved_token_3|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126088": {
+      "content": "<|reserved_token_4|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126089": {
+      "content": "<|reserved_token_5|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126090": {
+      "content": "<|reserved_token_6|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126091": {
+      "content": "<|reserved_token_7|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126092": {
+      "content": "<|reserved_token_8|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126093": {
+      "content": "<|reserved_token_9|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126094": {
+      "content": "<|reserved_token_10|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126095": {
+      "content": "<|reserved_token_11|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126096": {
+      "content": "<|reserved_token_12|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126097": {
+      "content": "<|reserved_token_13|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126098": {
+      "content": "<|reserved_token_14|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126099": {
+      "content": "<|reserved_token_15|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126100": {
+      "content": "<|reserved_token_16|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126101": {
+      "content": "<|reserved_token_17|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126102": {
+      "content": "<|reserved_token_18|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126103": {
+      "content": "<|reserved_token_19|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126104": {
+      "content": "<|reserved_token_20|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126105": {
+      "content": "<|reserved_token_21|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126106": {
+      "content": "<|reserved_token_22|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126107": {
+      "content": "<|reserved_token_23|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126108": {
+      "content": "<|reserved_token_24|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126109": {
+      "content": "<|reserved_token_25|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126110": {
+      "content": "<|reserved_token_26|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126111": {
+      "content": "<|reserved_token_27|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126112": {
+      "content": "<|reserved_token_28|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126113": {
+      "content": "<|reserved_token_29|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126114": {
+      "content": "<|reserved_token_30|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126115": {
+      "content": "<|reserved_token_31|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126116": {
+      "content": "<|reserved_token_32|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126117": {
+      "content": "<|reserved_token_33|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126118": {
+      "content": "<|reserved_token_34|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126119": {
+      "content": "<|reserved_token_35|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126120": {
+      "content": "<|reserved_token_36|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126121": {
+      "content": "<|reserved_token_37|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126122": {
+      "content": "<|reserved_token_38|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126123": {
+      "content": "<|reserved_token_39|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126124": {
+      "content": "<|reserved_token_40|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126125": {
+      "content": "<|reserved_token_41|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126126": {
+      "content": "<|reserved_token_42|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126127": {
+      "content": "<|reserved_token_43|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126128": {
+      "content": "<|reserved_token_44|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126129": {
+      "content": "<|reserved_token_45|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126130": {
+      "content": "<|reserved_token_46|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126131": {
+      "content": "<|reserved_token_47|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126132": {
+      "content": "<|reserved_token_48|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126133": {
+      "content": "<|reserved_token_49|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126134": {
+      "content": "<|reserved_token_50|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126135": {
+      "content": "<|reserved_token_51|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126136": {
+      "content": "<|reserved_token_52|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126137": {
+      "content": "<|reserved_token_53|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126138": {
+      "content": "<|reserved_token_54|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126139": {
+      "content": "<|reserved_token_55|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126140": {
+      "content": "<|reserved_token_56|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126141": {
+      "content": "<|reserved_token_57|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126142": {
+      "content": "<|reserved_token_58|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126143": {
+      "content": "<|reserved_token_59|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126144": {
+      "content": "<|reserved_token_60|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126145": {
+      "content": "<|reserved_token_61|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126146": {
+      "content": "<|reserved_token_62|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126147": {
+      "content": "<|reserved_token_63|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126148": {
+      "content": "<|reserved_token_64|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126149": {
+      "content": "<|reserved_token_65|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126150": {
+      "content": "<|reserved_token_66|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "126151": {
+      "content": "<|reserved_token_67|>",
+      "lstrip": false,
     | 
| 576 | 
         
            +
                  "normalized": false,
         
     | 
| 577 | 
         
            +
                  "rstrip": false,
         
     | 
| 578 | 
         
            +
                  "single_word": false,
         
     | 
| 579 | 
         
            +
                  "special": true
         
     | 
| 580 | 
         
            +
                },
         
     | 
| 581 | 
         
            +
                "126152": {
         
     | 
| 582 | 
         
            +
                  "content": "<|reserved_token_68|>",
         
     | 
| 583 | 
         
            +
                  "lstrip": false,
         
     | 
| 584 | 
         
            +
                  "normalized": false,
         
     | 
| 585 | 
         
            +
                  "rstrip": false,
         
     | 
| 586 | 
         
            +
                  "single_word": false,
         
     | 
| 587 | 
         
            +
                  "special": true
         
     | 
| 588 | 
         
            +
                },
         
     | 
| 589 | 
         
            +
                "126153": {
         
     | 
| 590 | 
         
            +
                  "content": "<|reserved_token_69|>",
         
     | 
| 591 | 
         
            +
                  "lstrip": false,
         
     | 
| 592 | 
         
            +
                  "normalized": false,
         
     | 
| 593 | 
         
            +
                  "rstrip": false,
         
     | 
| 594 | 
         
            +
                  "single_word": false,
         
     | 
| 595 | 
         
            +
                  "special": true
         
     | 
| 596 | 
         
            +
                },
         
     | 
| 597 | 
         
            +
                "126154": {
         
     | 
| 598 | 
         
            +
                  "content": "<|reserved_token_70|>",
         
     | 
| 599 | 
         
            +
                  "lstrip": false,
         
     | 
| 600 | 
         
            +
                  "normalized": false,
         
     | 
| 601 | 
         
            +
                  "rstrip": false,
         
     | 
| 602 | 
         
            +
                  "single_word": false,
         
     | 
| 603 | 
         
            +
                  "special": true
         
     | 
| 604 | 
         
            +
                },
         
     | 
| 605 | 
         
            +
                "126155": {
         
     | 
| 606 | 
         
            +
                  "content": "<|reserved_token_71|>",
         
     | 
| 607 | 
         
            +
                  "lstrip": false,
         
     | 
| 608 | 
         
            +
                  "normalized": false,
         
     | 
| 609 | 
         
            +
                  "rstrip": false,
         
     | 
| 610 | 
         
            +
                  "single_word": false,
         
     | 
| 611 | 
         
            +
                  "special": true
         
     | 
| 612 | 
         
            +
                },
         
     | 
| 613 | 
         
            +
                "126156": {
         
     | 
| 614 | 
         
            +
                  "content": "<|reserved_token_72|>",
         
     | 
| 615 | 
         
            +
                  "lstrip": false,
         
     | 
| 616 | 
         
            +
                  "normalized": false,
         
     | 
| 617 | 
         
            +
                  "rstrip": false,
         
     | 
| 618 | 
         
            +
                  "single_word": false,
         
     | 
| 619 | 
         
            +
                  "special": true
         
     | 
| 620 | 
         
            +
                },
         
     | 
| 621 | 
         
            +
                "126157": {
         
     | 
| 622 | 
         
            +
                  "content": "<|reserved_token_73|>",
         
     | 
| 623 | 
         
            +
                  "lstrip": false,
         
     | 
| 624 | 
         
            +
                  "normalized": false,
         
     | 
| 625 | 
         
            +
                  "rstrip": false,
         
     | 
| 626 | 
         
            +
                  "single_word": false,
         
     | 
| 627 | 
         
            +
                  "special": true
         
     | 
| 628 | 
         
            +
                },
         
     | 
| 629 | 
         
            +
                "126158": {
         
     | 
| 630 | 
         
            +
                  "content": "<|reserved_token_74|>",
         
     | 
| 631 | 
         
            +
                  "lstrip": false,
         
     | 
| 632 | 
         
            +
                  "normalized": false,
         
     | 
| 633 | 
         
            +
                  "rstrip": false,
         
     | 
| 634 | 
         
            +
                  "single_word": false,
         
     | 
| 635 | 
         
            +
                  "special": true
         
     | 
| 636 | 
         
            +
                },
         
     | 
| 637 | 
         
            +
                "126159": {
         
     | 
| 638 | 
         
            +
                  "content": "<|reserved_token_75|>",
         
     | 
| 639 | 
         
            +
                  "lstrip": false,
         
     | 
| 640 | 
         
            +
                  "normalized": false,
         
     | 
| 641 | 
         
            +
                  "rstrip": false,
         
     | 
| 642 | 
         
            +
                  "single_word": false,
         
     | 
| 643 | 
         
            +
                  "special": true
         
     | 
| 644 | 
         
            +
                },
         
     | 
| 645 | 
         
            +
                "126160": {
         
     | 
| 646 | 
         
            +
                  "content": "<|reserved_token_76|>",
         
     | 
| 647 | 
         
            +
                  "lstrip": false,
         
     | 
| 648 | 
         
            +
                  "normalized": false,
         
     | 
| 649 | 
         
            +
                  "rstrip": false,
         
     | 
| 650 | 
         
            +
                  "single_word": false,
         
     | 
| 651 | 
         
            +
                  "special": true
         
     | 
| 652 | 
         
            +
                },
         
     | 
| 653 | 
         
            +
                "126161": {
         
     | 
| 654 | 
         
            +
                  "content": "<|reserved_token_77|>",
         
     | 
| 655 | 
         
            +
                  "lstrip": false,
         
     | 
| 656 | 
         
            +
                  "normalized": false,
         
     | 
| 657 | 
         
            +
                  "rstrip": false,
         
     | 
| 658 | 
         
            +
                  "single_word": false,
         
     | 
| 659 | 
         
            +
                  "special": true
         
     | 
| 660 | 
         
            +
                },
         
     | 
| 661 | 
         
            +
                "126162": {
         
     | 
| 662 | 
         
            +
                  "content": "<|reserved_token_78|>",
         
     | 
| 663 | 
         
            +
                  "lstrip": false,
         
     | 
| 664 | 
         
            +
                  "normalized": false,
         
     | 
| 665 | 
         
            +
                  "rstrip": false,
         
     | 
| 666 | 
         
            +
                  "single_word": false,
         
     | 
| 667 | 
         
            +
                  "special": true
         
     | 
| 668 | 
         
            +
                },
         
     | 
| 669 | 
         
            +
                "126163": {
         
     | 
| 670 | 
         
            +
                  "content": "<|reserved_token_79|>",
         
     | 
| 671 | 
         
            +
                  "lstrip": false,
         
     | 
| 672 | 
         
            +
                  "normalized": false,
         
     | 
| 673 | 
         
            +
                  "rstrip": false,
         
     | 
| 674 | 
         
            +
                  "single_word": false,
         
     | 
| 675 | 
         
            +
                  "special": true
         
     | 
| 676 | 
         
            +
                },
         
     | 
| 677 | 
         
            +
                "126164": {
         
     | 
| 678 | 
         
            +
                  "content": "<|reserved_token_80|>",
         
     | 
| 679 | 
         
            +
                  "lstrip": false,
         
     | 
| 680 | 
         
            +
                  "normalized": false,
         
     | 
| 681 | 
         
            +
                  "rstrip": false,
         
     | 
| 682 | 
         
            +
                  "single_word": false,
         
     | 
| 683 | 
         
            +
                  "special": true
         
     | 
| 684 | 
         
            +
                },
         
     | 
| 685 | 
         
            +
                "126165": {
         
     | 
| 686 | 
         
            +
                  "content": "<|reserved_token_81|>",
         
     | 
| 687 | 
         
            +
                  "lstrip": false,
         
     | 
| 688 | 
         
            +
                  "normalized": false,
         
     | 
| 689 | 
         
            +
                  "rstrip": false,
         
     | 
| 690 | 
         
            +
                  "single_word": false,
         
     | 
| 691 | 
         
            +
                  "special": true
         
     | 
| 692 | 
         
            +
                },
         
     | 
| 693 | 
         
            +
                "126166": {
         
     | 
| 694 | 
         
            +
                  "content": "<|reserved_token_82|>",
         
     | 
| 695 | 
         
            +
                  "lstrip": false,
         
     | 
| 696 | 
         
            +
                  "normalized": false,
         
     | 
| 697 | 
         
            +
                  "rstrip": false,
         
     | 
| 698 | 
         
            +
                  "single_word": false,
         
     | 
| 699 | 
         
            +
                  "special": true
         
     | 
| 700 | 
         
            +
                },
         
     | 
| 701 | 
         
            +
                "126167": {
         
     | 
| 702 | 
         
            +
                  "content": "<|reserved_token_83|>",
         
     | 
| 703 | 
         
            +
                  "lstrip": false,
         
     | 
| 704 | 
         
            +
                  "normalized": false,
         
     | 
| 705 | 
         
            +
                  "rstrip": false,
         
     | 
| 706 | 
         
            +
                  "single_word": false,
         
     | 
| 707 | 
         
            +
                  "special": true
         
     | 
| 708 | 
         
            +
                },
         
     | 
| 709 | 
         
            +
                "126168": {
         
     | 
| 710 | 
         
            +
                  "content": "<|reserved_token_84|>",
         
     | 
| 711 | 
         
            +
                  "lstrip": false,
         
     | 
| 712 | 
         
            +
                  "normalized": false,
         
     | 
| 713 | 
         
            +
                  "rstrip": false,
         
     | 
| 714 | 
         
            +
                  "single_word": false,
         
     | 
| 715 | 
         
            +
                  "special": true
         
     | 
| 716 | 
         
            +
                },
         
     | 
| 717 | 
         
            +
                "126169": {
         
     | 
| 718 | 
         
            +
                  "content": "<|reserved_token_85|>",
         
     | 
| 719 | 
         
            +
                  "lstrip": false,
         
     | 
| 720 | 
         
            +
                  "normalized": false,
         
     | 
| 721 | 
         
            +
                  "rstrip": false,
         
     | 
| 722 | 
         
            +
                  "single_word": false,
         
     | 
| 723 | 
         
            +
                  "special": true
         
     | 
| 724 | 
         
            +
                },
         
     | 
| 725 | 
         
            +
                "126170": {
         
     | 
| 726 | 
         
            +
                  "content": "<|reserved_token_86|>",
         
     | 
| 727 | 
         
            +
                  "lstrip": false,
         
     | 
| 728 | 
         
            +
                  "normalized": false,
         
     | 
| 729 | 
         
            +
                  "rstrip": false,
         
     | 
| 730 | 
         
            +
                  "single_word": false,
         
     | 
| 731 | 
         
            +
                  "special": true
         
     | 
| 732 | 
         
            +
                },
         
     | 
| 733 | 
         
            +
                "126171": {
         
     | 
| 734 | 
         
            +
                  "content": "<|reserved_token_87|>",
         
     | 
| 735 | 
         
            +
                  "lstrip": false,
         
     | 
| 736 | 
         
            +
                  "normalized": false,
         
     | 
| 737 | 
         
            +
                  "rstrip": false,
         
     | 
| 738 | 
         
            +
                  "single_word": false,
         
     | 
| 739 | 
         
            +
                  "special": true
         
     | 
| 740 | 
         
            +
                },
         
     | 
| 741 | 
         
            +
                "126172": {
         
     | 
| 742 | 
         
            +
                  "content": "<|reserved_token_88|>",
         
     | 
| 743 | 
         
            +
                  "lstrip": false,
         
     | 
| 744 | 
         
            +
                  "normalized": false,
         
     | 
| 745 | 
         
            +
                  "rstrip": false,
         
     | 
| 746 | 
         
            +
                  "single_word": false,
         
     | 
| 747 | 
         
            +
                  "special": true
         
     | 
| 748 | 
         
            +
                },
         
     | 
| 749 | 
         
            +
                "126173": {
         
     | 
| 750 | 
         
            +
                  "content": "<|reserved_token_89|>",
         
     | 
| 751 | 
         
            +
                  "lstrip": false,
         
     | 
| 752 | 
         
            +
                  "normalized": false,
         
     | 
| 753 | 
         
            +
                  "rstrip": false,
         
     | 
| 754 | 
         
            +
                  "single_word": false,
         
     | 
| 755 | 
         
            +
                  "special": true
         
     | 
| 756 | 
         
            +
                },
         
     | 
| 757 | 
         
            +
                "126174": {
         
     | 
| 758 | 
         
            +
                  "content": "<|reserved_token_90|>",
         
     | 
| 759 | 
         
            +
                  "lstrip": false,
         
     | 
| 760 | 
         
            +
                  "normalized": false,
         
     | 
| 761 | 
         
            +
                  "rstrip": false,
         
     | 
| 762 | 
         
            +
                  "single_word": false,
         
     | 
| 763 | 
         
            +
                  "special": true
         
     | 
| 764 | 
         
            +
                },
         
     | 
| 765 | 
         
            +
                "126175": {
         
     | 
| 766 | 
         
            +
                  "content": "<|reserved_token_91|>",
         
     | 
| 767 | 
         
            +
                  "lstrip": false,
         
     | 
| 768 | 
         
            +
                  "normalized": false,
         
     | 
| 769 | 
         
            +
                  "rstrip": false,
         
     | 
| 770 | 
         
            +
                  "single_word": false,
         
     | 
| 771 | 
         
            +
                  "special": true
         
     | 
| 772 | 
         
            +
                },
         
     | 
| 773 | 
         
            +
                "126176": {
         
     | 
| 774 | 
         
            +
                  "content": "<|reserved_token_92|>",
         
     | 
| 775 | 
         
            +
                  "lstrip": false,
         
     | 
| 776 | 
         
            +
                  "normalized": false,
         
     | 
| 777 | 
         
            +
                  "rstrip": false,
         
     | 
| 778 | 
         
            +
                  "single_word": false,
         
     | 
| 779 | 
         
            +
                  "special": true
         
     | 
| 780 | 
         
            +
                },
         
     | 
| 781 | 
         
            +
                "126177": {
         
     | 
| 782 | 
         
            +
                  "content": "<|reserved_token_93|>",
         
     | 
| 783 | 
         
            +
                  "lstrip": false,
         
     | 
| 784 | 
         
            +
                  "normalized": false,
         
     | 
| 785 | 
         
            +
                  "rstrip": false,
         
     | 
| 786 | 
         
            +
                  "single_word": false,
         
     | 
| 787 | 
         
            +
                  "special": true
         
     | 
| 788 | 
         
            +
                },
         
     | 
| 789 | 
         
            +
                "126178": {
         
     | 
| 790 | 
         
            +
                  "content": "<|reserved_token_94|>",
         
     | 
| 791 | 
         
            +
                  "lstrip": false,
         
     | 
| 792 | 
         
            +
                  "normalized": false,
         
     | 
| 793 | 
         
            +
                  "rstrip": false,
         
     | 
| 794 | 
         
            +
                  "single_word": false,
         
     | 
| 795 | 
         
            +
                  "special": true
         
     | 
| 796 | 
         
            +
                },
         
     | 
| 797 | 
         
            +
                "126179": {
         
     | 
| 798 | 
         
            +
                  "content": "<|reserved_token_95|>",
         
     | 
| 799 | 
         
            +
                  "lstrip": false,
         
     | 
| 800 | 
         
            +
                  "normalized": false,
         
     | 
| 801 | 
         
            +
                  "rstrip": false,
         
     | 
| 802 | 
         
            +
                  "single_word": false,
         
     | 
| 803 | 
         
            +
                  "special": true
         
     | 
| 804 | 
         
            +
                },
         
     | 
| 805 | 
         
            +
                "126180": {
         
     | 
| 806 | 
         
            +
                  "content": "<|reserved_token_96|>",
         
     | 
| 807 | 
         
            +
                  "lstrip": false,
         
     | 
| 808 | 
         
            +
                  "normalized": false,
         
     | 
| 809 | 
         
            +
                  "rstrip": false,
         
     | 
| 810 | 
         
            +
                  "single_word": false,
         
     | 
| 811 | 
         
            +
                  "special": true
         
     | 
| 812 | 
         
            +
                },
         
     | 
| 813 | 
         
            +
                "126181": {
         
     | 
| 814 | 
         
            +
                  "content": "<|reserved_token_97|>",
         
     | 
| 815 | 
         
            +
                  "lstrip": false,
         
     | 
| 816 | 
         
            +
                  "normalized": false,
         
     | 
| 817 | 
         
            +
                  "rstrip": false,
         
     | 
| 818 | 
         
            +
                  "single_word": false,
         
     | 
| 819 | 
         
            +
                  "special": true
         
     | 
| 820 | 
         
            +
                },
         
     | 
| 821 | 
         
            +
                "126182": {
         
     | 
| 822 | 
         
            +
                  "content": "<|reserved_token_98|>",
         
     | 
| 823 | 
         
            +
                  "lstrip": false,
         
     | 
| 824 | 
         
            +
                  "normalized": false,
         
     | 
| 825 | 
         
            +
                  "rstrip": false,
         
     | 
| 826 | 
         
            +
                  "single_word": false,
         
     | 
| 827 | 
         
            +
                  "special": true
         
     | 
| 828 | 
         
            +
                },
         
     | 
| 829 | 
         
            +
                "126183": {
         
     | 
| 830 | 
         
            +
                  "content": "<|reserved_token_99|>",
         
     | 
| 831 | 
         
            +
                  "lstrip": false,
         
     | 
| 832 | 
         
            +
                  "normalized": false,
         
     | 
| 833 | 
         
            +
                  "rstrip": false,
         
     | 
| 834 | 
         
            +
                  "single_word": false,
         
     | 
| 835 | 
         
            +
                  "special": true
         
     | 
| 836 | 
         
            +
                },
         
     | 
| 837 | 
         
            +
                "126184": {
         
     | 
| 838 | 
         
            +
                  "content": "<|reserved_token_100|>",
         
     | 
| 839 | 
         
            +
                  "lstrip": false,
         
     | 
| 840 | 
         
            +
                  "normalized": false,
         
     | 
| 841 | 
         
            +
                  "rstrip": false,
         
     | 
| 842 | 
         
            +
                  "single_word": false,
         
     | 
| 843 | 
         
            +
                  "special": true
         
     | 
| 844 | 
         
            +
                },
         
     | 
| 845 | 
         
            +
                "126185": {
         
     | 
| 846 | 
         
            +
                  "content": "<|reserved_token_101|>",
         
     | 
| 847 | 
         
            +
                  "lstrip": false,
         
     | 
| 848 | 
         
            +
                  "normalized": false,
         
     | 
| 849 | 
         
            +
                  "rstrip": false,
         
     | 
| 850 | 
         
            +
                  "single_word": false,
         
     | 
| 851 | 
         
            +
                  "special": true
         
     | 
| 852 | 
         
            +
                },
         
     | 
| 853 | 
         
            +
                "126186": {
         
     | 
| 854 | 
         
            +
                  "content": "<|reserved_token_102|>",
         
     | 
| 855 | 
         
            +
                  "lstrip": false,
         
     | 
| 856 | 
         
            +
                  "normalized": false,
         
     | 
| 857 | 
         
            +
                  "rstrip": false,
         
     | 
| 858 | 
         
            +
                  "single_word": false,
         
     | 
| 859 | 
         
            +
                  "special": true
         
     | 
| 860 | 
         
            +
                },
         
     | 
| 861 | 
         
            +
                "126187": {
         
     | 
| 862 | 
         
            +
                  "content": "<|reserved_token_103|>",
         
     | 
| 863 | 
         
            +
                  "lstrip": false,
         
     | 
| 864 | 
         
            +
                  "normalized": false,
         
     | 
| 865 | 
         
            +
                  "rstrip": false,
         
     | 
| 866 | 
         
            +
                  "single_word": false,
         
     | 
| 867 | 
         
            +
                  "special": true
         
     | 
| 868 | 
         
            +
                },
         
     | 
| 869 | 
         
            +
                "126188": {
         
     | 
| 870 | 
         
            +
                  "content": "<|reserved_token_104|>",
         
     | 
| 871 | 
         
            +
                  "lstrip": false,
         
     | 
| 872 | 
         
            +
                  "normalized": false,
         
     | 
| 873 | 
         
            +
                  "rstrip": false,
         
     | 
| 874 | 
         
            +
                  "single_word": false,
         
     | 
| 875 | 
         
            +
                  "special": true
         
     | 
| 876 | 
         
            +
                },
         
     | 
| 877 | 
         
            +
                "126189": {
         
     | 
| 878 | 
         
            +
                  "content": "<|reserved_token_105|>",
         
     | 
| 879 | 
         
            +
                  "lstrip": false,
         
     | 
| 880 | 
         
            +
                  "normalized": false,
         
     | 
| 881 | 
         
            +
                  "rstrip": false,
         
     | 
| 882 | 
         
            +
                  "single_word": false,
         
     | 
| 883 | 
         
            +
                  "special": true
         
     | 
| 884 | 
         
            +
                },
         
     | 
| 885 | 
         
            +
                "126190": {
         
     | 
| 886 | 
         
            +
                  "content": "<|reserved_token_106|>",
         
     | 
| 887 | 
         
            +
                  "lstrip": false,
         
     | 
| 888 | 
         
            +
                  "normalized": false,
         
     | 
| 889 | 
         
            +
                  "rstrip": false,
         
     | 
| 890 | 
         
            +
                  "single_word": false,
         
     | 
| 891 | 
         
            +
                  "special": true
         
     | 
| 892 | 
         
            +
                },
         
     | 
| 893 | 
         
            +
                "126191": {
         
     | 
| 894 | 
         
            +
                  "content": "<|reserved_token_107|>",
         
     | 
| 895 | 
         
            +
                  "lstrip": false,
         
     | 
| 896 | 
         
            +
                  "normalized": false,
         
     | 
| 897 | 
         
            +
                  "rstrip": false,
         
     | 
| 898 | 
         
            +
                  "single_word": false,
         
     | 
| 899 | 
         
            +
                  "special": true
         
     | 
| 900 | 
         
            +
                },
         
     | 
| 901 | 
         
            +
                "126192": {
         
     | 
| 902 | 
         
            +
                  "content": "<|reserved_token_108|>",
         
     | 
| 903 | 
         
            +
                  "lstrip": false,
         
     | 
| 904 | 
         
            +
                  "normalized": false,
         
     | 
| 905 | 
         
            +
                  "rstrip": false,
         
     | 
| 906 | 
         
            +
                  "single_word": false,
         
     | 
| 907 | 
         
            +
                  "special": true
         
     | 
| 908 | 
         
            +
                },
         
     | 
| 909 | 
         
            +
                "126193": {
         
     | 
| 910 | 
         
            +
                  "content": "<|reserved_token_109|>",
         
     | 
| 911 | 
         
            +
                  "lstrip": false,
         
     | 
| 912 | 
         
            +
                  "normalized": false,
         
     | 
| 913 | 
         
            +
                  "rstrip": false,
         
     | 
| 914 | 
         
            +
                  "single_word": false,
         
     | 
| 915 | 
         
            +
                  "special": true
         
     | 
| 916 | 
         
            +
                },
         
     | 
| 917 | 
         
            +
                "126194": {
         
     | 
| 918 | 
         
            +
                  "content": "<|reserved_token_110|>",
         
     | 
| 919 | 
         
            +
                  "lstrip": false,
         
     | 
| 920 | 
         
            +
                  "normalized": false,
         
     | 
| 921 | 
         
            +
                  "rstrip": false,
         
     | 
| 922 | 
         
            +
                  "single_word": false,
         
     | 
| 923 | 
         
            +
                  "special": true
         
     | 
| 924 | 
         
            +
                },
         
     | 
| 925 | 
         
            +
                "126195": {
         
     | 
| 926 | 
         
            +
                  "content": "<|reserved_token_111|>",
         
     | 
| 927 | 
         
            +
                  "lstrip": false,
         
     | 
| 928 | 
         
            +
                  "normalized": false,
         
     | 
| 929 | 
         
            +
                  "rstrip": false,
         
     | 
| 930 | 
         
            +
                  "single_word": false,
         
     | 
| 931 | 
         
            +
                  "special": true
         
     | 
| 932 | 
         
            +
                },
         
     | 
| 933 | 
         
            +
                "126196": {
         
     | 
| 934 | 
         
            +
                  "content": "<|reserved_token_112|>",
         
     | 
| 935 | 
         
            +
                  "lstrip": false,
         
     | 
| 936 | 
         
            +
                  "normalized": false,
         
     | 
| 937 | 
         
            +
                  "rstrip": false,
         
     | 
| 938 | 
         
            +
                  "single_word": false,
         
     | 
| 939 | 
         
            +
                  "special": true
         
     | 
| 940 | 
         
            +
                },
         
     | 
| 941 | 
         
            +
                "126197": {
         
     | 
| 942 | 
         
            +
                  "content": "<|reserved_token_113|>",
         
     | 
| 943 | 
         
            +
                  "lstrip": false,
         
     | 
| 944 | 
         
            +
                  "normalized": false,
         
     | 
| 945 | 
         
            +
                  "rstrip": false,
         
     | 
| 946 | 
         
            +
                  "single_word": false,
         
     | 
| 947 | 
         
            +
                  "special": true
         
     | 
| 948 | 
         
            +
                },
         
     | 
| 949 | 
         
            +
                "126198": {
         
     | 
| 950 | 
         
            +
                  "content": "<|reserved_token_114|>",
         
     | 
| 951 | 
         
            +
                  "lstrip": false,
         
     | 
| 952 | 
         
            +
                  "normalized": false,
         
     | 
| 953 | 
         
            +
                  "rstrip": false,
         
     | 
| 954 | 
         
            +
                  "single_word": false,
         
     | 
| 955 | 
         
            +
                  "special": true
         
     | 
| 956 | 
         
            +
                },
         
     | 
| 957 | 
         
            +
                "126199": {
         
     | 
| 958 | 
         
            +
                  "content": "<|reserved_token_115|>",
         
     | 
| 959 | 
         
            +
                  "lstrip": false,
         
     | 
| 960 | 
         
            +
                  "normalized": false,
         
     | 
| 961 | 
         
            +
                  "rstrip": false,
         
     | 
| 962 | 
         
            +
                  "single_word": false,
         
     | 
| 963 | 
         
            +
                  "special": true
         
     | 
| 964 | 
         
            +
                },
         
     | 
| 965 | 
         
            +
                "126200": {
         
     | 
| 966 | 
         
            +
                  "content": "<|reserved_token_116|>",
         
     | 
| 967 | 
         
            +
                  "lstrip": false,
         
     | 
| 968 | 
         
            +
                  "normalized": false,
         
     | 
| 969 | 
         
            +
                  "rstrip": false,
         
     | 
| 970 | 
         
            +
                  "single_word": false,
         
     | 
| 971 | 
         
            +
                  "special": true
         
     | 
| 972 | 
         
            +
                },
         
     | 
| 973 | 
         
            +
                "126201": {
         
     | 
| 974 | 
         
            +
                  "content": "<|reserved_token_117|>",
         
     | 
| 975 | 
         
            +
                  "lstrip": false,
         
     | 
| 976 | 
         
            +
                  "normalized": false,
         
     | 
| 977 | 
         
            +
                  "rstrip": false,
         
     | 
| 978 | 
         
            +
                  "single_word": false,
         
     | 
| 979 | 
         
            +
                  "special": true
         
     | 
| 980 | 
         
            +
                },
         
     | 
| 981 | 
         
            +
                "126202": {
         
     | 
| 982 | 
         
            +
                  "content": "<|reserved_token_118|>",
         
     | 
| 983 | 
         
            +
                  "lstrip": false,
         
     | 
| 984 | 
         
            +
                  "normalized": false,
         
     | 
| 985 | 
         
            +
                  "rstrip": false,
         
     | 
| 986 | 
         
            +
                  "single_word": false,
         
     | 
| 987 | 
         
            +
                  "special": true
         
     | 
| 988 | 
         
            +
                },
         
     | 
| 989 | 
         
            +
                "126203": {
         
     | 
| 990 | 
         
            +
                  "content": "<|reserved_token_119|>",
         
     | 
| 991 | 
         
            +
                  "lstrip": false,
         
     | 
| 992 | 
         
            +
                  "normalized": false,
         
     | 
| 993 | 
         
            +
                  "rstrip": false,
         
     | 
| 994 | 
         
            +
                  "single_word": false,
         
     | 
| 995 | 
         
            +
                  "special": true
         
     | 
| 996 | 
         
            +
                },
         
     | 
| 997 | 
         
            +
                "126204": {
         
     | 
| 998 | 
         
            +
                  "content": "<|reserved_token_120|>",
         
     | 
| 999 | 
         
            +
                  "lstrip": false,
         
     | 
| 1000 | 
         
            +
                  "normalized": false,
         
     | 
| 1001 | 
         
            +
                  "rstrip": false,
         
     | 
| 1002 | 
         
            +
                  "single_word": false,
         
     | 
| 1003 | 
         
            +
                  "special": true
         
     | 
| 1004 | 
         
            +
                },
         
     | 
| 1005 | 
         
            +
                "126205": {
         
     | 
| 1006 | 
         
            +
                  "content": "<|reserved_token_121|>",
         
     | 
| 1007 | 
         
            +
                  "lstrip": false,
         
     | 
| 1008 | 
         
            +
                  "normalized": false,
         
     | 
| 1009 | 
         
            +
                  "rstrip": false,
         
     | 
| 1010 | 
         
            +
                  "single_word": false,
         
     | 
| 1011 | 
         
            +
                  "special": true
         
     | 
| 1012 | 
         
            +
                },
         
     | 
| 1013 | 
         
            +
                "126206": {
         
     | 
| 1014 | 
         
            +
                  "content": "<|reserved_token_122|>",
         
     | 
| 1015 | 
         
            +
                  "lstrip": false,
         
     | 
| 1016 | 
         
            +
                  "normalized": false,
         
     | 
| 1017 | 
         
            +
                  "rstrip": false,
         
     | 
| 1018 | 
         
            +
                  "single_word": false,
         
     | 
| 1019 | 
         
            +
                  "special": true
         
     | 
| 1020 | 
         
            +
                },
         
     | 
| 1021 | 
         
            +
                "126207": {
         
     | 
| 1022 | 
         
            +
                  "content": "<|reserved_token_123|>",
         
     | 
| 1023 | 
         
            +
                  "lstrip": false,
         
     | 
| 1024 | 
         
            +
                  "normalized": false,
         
     | 
| 1025 | 
         
            +
                  "rstrip": false,
         
     | 
| 1026 | 
         
            +
                  "single_word": false,
         
     | 
| 1027 | 
         
            +
                  "special": true
         
     | 
| 1028 | 
         
            +
                },
         
     | 
| 1029 | 
         
            +
                "126208": {
         
     | 
| 1030 | 
         
            +
                  "content": "<|reserved_token_124|>",
         
     | 
| 1031 | 
         
            +
                  "lstrip": false,
         
     | 
| 1032 | 
         
            +
                  "normalized": false,
         
     | 
| 1033 | 
         
            +
                  "rstrip": false,
         
     | 
| 1034 | 
         
            +
                  "single_word": false,
         
     | 
| 1035 | 
         
            +
                  "special": true
         
     | 
| 1036 | 
         
            +
                },
         
     | 
| 1037 | 
         
            +
                "126209": {
         
     | 
| 1038 | 
         
            +
                  "content": "<|reserved_token_125|>",
         
     | 
| 1039 | 
         
            +
                  "lstrip": false,
         
     | 
| 1040 | 
         
            +
                  "normalized": false,
         
     | 
| 1041 | 
         
            +
                  "rstrip": false,
         
     | 
| 1042 | 
         
            +
                  "single_word": false,
         
     | 
| 1043 | 
         
            +
                  "special": true
         
     | 
| 1044 | 
         
            +
                },
         
     | 
| 1045 | 
         
            +
                "126210": {
         
     | 
| 1046 | 
         
            +
                  "content": "<|reserved_token_126|>",
         
     | 
| 1047 | 
         
            +
                  "lstrip": false,
         
     | 
| 1048 | 
         
            +
                  "normalized": false,
         
     | 
| 1049 | 
         
            +
                  "rstrip": false,
         
     | 
| 1050 | 
         
            +
                  "single_word": false,
         
     | 
| 1051 | 
         
            +
                  "special": true
         
     | 
| 1052 | 
         
            +
                },
         
     | 
| 1053 | 
         
            +
                "126211": {
         
     | 
| 1054 | 
         
            +
                  "content": "<|reserved_token_127|>",
         
     | 
| 1055 | 
         
            +
                  "lstrip": false,
         
     | 
| 1056 | 
         
            +
                  "normalized": false,
         
     | 
| 1057 | 
         
            +
                  "rstrip": false,
         
     | 
| 1058 | 
         
            +
                  "single_word": false,
         
     | 
| 1059 | 
         
            +
                  "special": true
         
     | 
| 1060 | 
         
            +
                },
         
     | 
| 1061 | 
         
            +
                "126212": {
         
     | 
| 1062 | 
         
            +
                  "content": "<|reserved_token_128|>",
         
     | 
| 1063 | 
         
            +
                  "lstrip": false,
         
     | 
| 1064 | 
         
            +
                  "normalized": false,
         
     | 
| 1065 | 
         
            +
                  "rstrip": false,
         
     | 
| 1066 | 
         
            +
                  "single_word": false,
         
     | 
| 1067 | 
         
            +
                  "special": true
         
     | 
| 1068 | 
         
            +
                },
         
     | 
| 1069 | 
         
            +
                "126213": {
         
     | 
| 1070 | 
         
            +
                  "content": "<|reserved_token_129|>",
         
     | 
| 1071 | 
         
            +
                  "lstrip": false,
         
     | 
| 1072 | 
         
            +
                  "normalized": false,
         
     | 
| 1073 | 
         
            +
                  "rstrip": false,
         
     | 
| 1074 | 
         
            +
                  "single_word": false,
         
     | 
| 1075 | 
         
            +
                  "special": true
         
     | 
| 1076 | 
         
            +
                },
         
     | 
| 1077 | 
         
            +
                "126214": {
         
     | 
| 1078 | 
         
            +
                  "content": "<|reserved_token_130|>",
         
     | 
| 1079 | 
         
            +
                  "lstrip": false,
         
     | 
| 1080 | 
         
            +
                  "normalized": false,
         
     | 
| 1081 | 
         
            +
                  "rstrip": false,
         
     | 
| 1082 | 
         
            +
                  "single_word": false,
         
     | 
| 1083 | 
         
            +
                  "special": true
         
     | 
| 1084 | 
         
            +
                },
         
     | 
| 1085 | 
         
            +
                "126215": {
         
     | 
| 1086 | 
         
            +
                  "content": "<|reserved_token_131|>",
         
     | 
| 1087 | 
         
            +
                  "lstrip": false,
         
     | 
| 1088 | 
         
            +
                  "normalized": false,
         
     | 
| 1089 | 
         
            +
                  "rstrip": false,
         
     | 
| 1090 | 
         
            +
                  "single_word": false,
         
     | 
| 1091 | 
         
            +
                  "special": true
         
     | 
| 1092 | 
         
            +
                },
         
     | 
| 1093 | 
         
            +
                "126216": {
         
     | 
| 1094 | 
         
            +
                  "content": "<|reserved_token_132|>",
         
     | 
| 1095 | 
         
            +
                  "lstrip": false,
         
     | 
| 1096 | 
         
            +
                  "normalized": false,
         
     | 
| 1097 | 
         
            +
                  "rstrip": false,
         
     | 
| 1098 | 
         
            +
                  "single_word": false,
         
     | 
| 1099 | 
         
            +
                  "special": true
         
     | 
| 1100 | 
         
            +
                },
         
     | 
| 1101 | 
         
            +
                "126217": {
         
     | 
| 1102 | 
         
            +
                  "content": "<|reserved_token_133|>",
         
     | 
| 1103 | 
         
            +
                  "lstrip": false,
         
     | 
| 1104 | 
         
            +
                  "normalized": false,
         
     | 
| 1105 | 
         
            +
                  "rstrip": false,
         
     | 
| 1106 | 
         
            +
                  "single_word": false,
         
     | 
| 1107 | 
         
            +
                  "special": true
         
     | 
| 1108 | 
         
            +
                },
         
     | 
| 1109 | 
         
            +
                "126218": {
         
     | 
| 1110 | 
         
            +
                  "content": "<|reserved_token_134|>",
         
     | 
| 1111 | 
         
            +
                  "lstrip": false,
         
     | 
| 1112 | 
         
            +
                  "normalized": false,
         
     | 
| 1113 | 
         
            +
                  "rstrip": false,
         
     | 
| 1114 | 
         
            +
                  "single_word": false,
         
     | 
| 1115 | 
         
            +
                  "special": true
         
     | 
| 1116 | 
         
            +
                },
         
     | 
| 1117 | 
         
            +
                "126219": {
         
     | 
| 1118 | 
         
            +
                  "content": "<|reserved_token_135|>",
         
     | 
| 1119 | 
         
            +
                  "lstrip": false,
         
     | 
| 1120 | 
         
            +
                  "normalized": false,
         
     | 
| 1121 | 
         
            +
                  "rstrip": false,
         
     | 
| 1122 | 
         
            +
                  "single_word": false,
         
     | 
| 1123 | 
         
            +
                  "special": true
         
     | 
| 1124 | 
         
            +
                },
         
     | 
| 1125 | 
         
            +
                "126220": {
         
     | 
| 1126 | 
         
            +
                  "content": "<|reserved_token_136|>",
         
     | 
| 1127 | 
         
            +
                  "lstrip": false,
         
     | 
| 1128 | 
         
            +
                  "normalized": false,
         
     | 
| 1129 | 
         
            +
                  "rstrip": false,
         
     | 
| 1130 | 
         
            +
                  "single_word": false,
         
     | 
| 1131 | 
         
            +
                  "special": true
         
     | 
| 1132 | 
         
            +
                },
         
     | 
| 1133 | 
         
            +
                "126221": {
         
     | 
| 1134 | 
         
            +
                  "content": "<|reserved_token_137|>",
         
     | 
| 1135 | 
         
            +
                  "lstrip": false,
         
     | 
| 1136 | 
         
            +
                  "normalized": false,
         
     | 
| 1137 | 
         
            +
                  "rstrip": false,
         
     | 
| 1138 | 
         
            +
                  "single_word": false,
         
     | 
| 1139 | 
         
            +
                  "special": true
         
     | 
| 1140 | 
         
            +
                },
         
     | 
| 1141 | 
         
            +
                "126222": {
         
     | 
| 1142 | 
         
            +
                  "content": "<|reserved_token_138|>",
         
     | 
| 1143 | 
         
            +
                  "lstrip": false,
         
     | 
| 1144 | 
         
            +
                  "normalized": false,
         
     | 
| 1145 | 
         
            +
                  "rstrip": false,
         
     | 
| 1146 | 
         
            +
                  "single_word": false,
         
     | 
| 1147 | 
         
            +
                  "special": true
         
     | 
| 1148 | 
         
            +
                },
         
     | 
| 1149 | 
         
            +
                "126223": {
         
     | 
| 1150 | 
         
            +
                  "content": "<|reserved_token_139|>",
         
     | 
| 1151 | 
         
            +
                  "lstrip": false,
         
     | 
| 1152 | 
         
            +
                  "normalized": false,
         
     | 
| 1153 | 
         
            +
                  "rstrip": false,
         
     | 
| 1154 | 
         
            +
                  "single_word": false,
         
     | 
| 1155 | 
         
            +
                  "special": true
         
     | 
| 1156 | 
         
            +
                },
         
     | 
| 1157 | 
         
            +
                "126224": {
         
     | 
| 1158 | 
         
            +
                  "content": "<|reserved_token_140|>",
         
     | 
| 1159 | 
         
            +
                  "lstrip": false,
         
     | 
| 1160 | 
         
            +
                  "normalized": false,
         
     | 
| 1161 | 
         
            +
                  "rstrip": false,
         
     | 
| 1162 | 
         
            +
                  "single_word": false,
         
     | 
| 1163 | 
         
            +
                  "special": true
         
     | 
| 1164 | 
         
            +
                },
         
     | 
| 1165 | 
         
            +
                "126225": {
         
     | 
| 1166 | 
         
            +
                  "content": "<|reserved_token_141|>",
         
     | 
| 1167 | 
         
            +
                  "lstrip": false,
         
     | 
| 1168 | 
         
            +
                  "normalized": false,
         
     | 
| 1169 | 
         
            +
                  "rstrip": false,
         
     | 
| 1170 | 
         
            +
                  "single_word": false,
         
     | 
| 1171 | 
         
            +
                  "special": true
         
     | 
| 1172 | 
         
            +
                },
         
     | 
| 1173 | 
         
            +
                "126226": {
         
     | 
| 1174 | 
         
            +
                  "content": "<|reserved_token_142|>",
         
     | 
| 1175 | 
         
            +
                  "lstrip": false,
         
     | 
| 1176 | 
         
            +
                  "normalized": false,
         
     | 
| 1177 | 
         
            +
                  "rstrip": false,
         
     | 
| 1178 | 
         
            +
                  "single_word": false,
         
     | 
| 1179 | 
         
            +
                  "special": true
         
     | 
| 1180 | 
         
            +
                },
         
     | 
| 1181 | 
         
            +
                "126227": {
         
     | 
| 1182 | 
         
            +
                  "content": "<|reserved_token_143|>",
         
     | 
| 1183 | 
         
            +
                  "lstrip": false,
         
     | 
| 1184 | 
         
            +
                  "normalized": false,
         
     | 
| 1185 | 
         
            +
                  "rstrip": false,
         
     | 
| 1186 | 
         
            +
                  "single_word": false,
         
     | 
| 1187 | 
         
            +
                  "special": true
         
     | 
| 1188 | 
         
            +
                },
         
     | 
| 1189 | 
         
            +
                "126228": {
         
     | 
| 1190 | 
         
            +
                  "content": "<|reserved_token_144|>",
         
     | 
| 1191 | 
         
            +
                  "lstrip": false,
         
     | 
| 1192 | 
         
            +
                  "normalized": false,
         
     | 
| 1193 | 
         
            +
                  "rstrip": false,
         
     | 
| 1194 | 
         
            +
                  "single_word": false,
         
     | 
| 1195 | 
         
            +
                  "special": true
         
     | 
| 1196 | 
         
            +
                },
         
     | 
| 1197 | 
         
            +
                "126229": {
         
     | 
| 1198 | 
         
            +
                  "content": "<|reserved_token_145|>",
         
     | 
| 1199 | 
         
            +
                  "lstrip": false,
         
     | 
| 1200 | 
         
            +
                  "normalized": false,
         
     | 
| 1201 | 
         
            +
                  "rstrip": false,
         
     | 
| 1202 | 
         
            +
                  "single_word": false,
         
     | 
| 1203 | 
         
            +
                  "special": true
         
     | 
| 1204 | 
         
            +
                },
         
     | 
| 1205 | 
         
            +
                "126230": {
         
     | 
| 1206 | 
         
            +
                  "content": "<|reserved_token_146|>",
         
     | 
| 1207 | 
         
            +
                  "lstrip": false,
         
     | 
| 1208 | 
         
            +
                  "normalized": false,
         
     | 
| 1209 | 
         
            +
                  "rstrip": false,
         
     | 
| 1210 | 
         
            +
                  "single_word": false,
         
     | 
| 1211 | 
         
            +
                  "special": true
         
     | 
| 1212 | 
         
            +
                },
         
     | 
| 1213 | 
         
            +
                "126231": {
         
     | 
| 1214 | 
         
            +
                  "content": "<|reserved_token_147|>",
         
     | 
| 1215 | 
         
            +
                  "lstrip": false,
         
     | 
| 1216 | 
         
            +
                  "normalized": false,
         
     | 
| 1217 | 
         
            +
                  "rstrip": false,
         
     | 
| 1218 | 
         
            +
                  "single_word": false,
         
     | 
| 1219 | 
         
            +
                  "special": true
         
     | 
| 1220 | 
         
            +
                },
         
     | 
| 1221 | 
         
            +
                "126232": {
         
     | 
| 1222 | 
         
            +
                  "content": "<|reserved_token_148|>",
         
     | 
| 1223 | 
         
            +
                  "lstrip": false,
         
     | 
| 1224 | 
         
            +
                  "normalized": false,
         
     | 
| 1225 | 
         
            +
                  "rstrip": false,
         
     | 
| 1226 | 
         
            +
                  "single_word": false,
         
     | 
| 1227 | 
         
            +
                  "special": true
         
     | 
| 1228 | 
         
            +
                },
         
     | 
| 1229 | 
         
            +
                "126233": {
         
     | 
| 1230 | 
         
            +
                  "content": "<|reserved_token_149|>",
         
     | 
| 1231 | 
         
            +
                  "lstrip": false,
         
     | 
| 1232 | 
         
            +
                  "normalized": false,
         
     | 
| 1233 | 
         
            +
                  "rstrip": false,
         
     | 
| 1234 | 
         
            +
                  "single_word": false,
         
     | 
| 1235 | 
         
            +
                  "special": true
         
     | 
| 1236 | 
         
            +
                },
         
     | 
| 1237 | 
         
            +
                "126234": {
         
     | 
| 1238 | 
         
            +
                  "content": "<|reserved_token_150|>",
         
     | 
| 1239 | 
         
            +
                  "lstrip": false,
         
     | 
| 1240 | 
         
            +
                  "normalized": false,
         
     | 
| 1241 | 
         
            +
                  "rstrip": false,
         
     | 
| 1242 | 
         
            +
                  "single_word": false,
         
     | 
| 1243 | 
         
            +
                  "special": true
         
     | 
| 1244 | 
         
            +
                },
         
     | 
| 1245 | 
         
            +
                "126235": {
         
     | 
| 1246 | 
         
            +
                  "content": "<|reserved_token_151|>",
         
     | 
| 1247 | 
         
            +
                  "lstrip": false,
         
     | 
| 1248 | 
         
            +
                  "normalized": false,
         
     | 
| 1249 | 
         
            +
                  "rstrip": false,
         
     | 
| 1250 | 
         
            +
                  "single_word": false,
         
     | 
| 1251 | 
         
            +
                  "special": true
         
     | 
| 1252 | 
         
            +
                },
         
     | 
| 1253 | 
         
            +
                "126236": {
         
     | 
| 1254 | 
         
            +
                  "content": "<|reserved_token_152|>",
         
     | 
| 1255 | 
         
            +
                  "lstrip": false,
         
     | 
| 1256 | 
         
            +
                  "normalized": false,
         
     | 
| 1257 | 
         
            +
                  "rstrip": false,
         
     | 
| 1258 | 
         
            +
                  "single_word": false,
         
     | 
| 1259 | 
         
            +
                  "special": true
         
     | 
| 1260 | 
         
            +
                },
         
     | 
| 1261 | 
         
            +
                "126237": {
         
     | 
| 1262 | 
         
            +
                  "content": "<|reserved_token_153|>",
         
     | 
| 1263 | 
         
            +
                  "lstrip": false,
         
     | 
| 1264 | 
         
            +
                  "normalized": false,
         
     | 
| 1265 | 
         
            +
                  "rstrip": false,
         
     | 
| 1266 | 
         
            +
                  "single_word": false,
         
     | 
| 1267 | 
         
            +
                  "special": true
         
     | 
| 1268 | 
         
            +
                },
         
     | 
| 1269 | 
         
            +
                "126238": {
         
     | 
| 1270 | 
         
            +
                  "content": "<|reserved_token_154|>",
         
     | 
| 1271 | 
         
            +
                  "lstrip": false,
         
     | 
| 1272 | 
         
            +
                  "normalized": false,
         
     | 
| 1273 | 
         
            +
                  "rstrip": false,
         
     | 
| 1274 | 
         
            +
                  "single_word": false,
         
     | 
| 1275 | 
         
            +
                  "special": true
         
     | 
| 1276 | 
         
            +
                },
         
     | 
| 1277 | 
         
            +
                "126239": {
         
     | 
| 1278 | 
         
            +
                  "content": "<|reserved_token_155|>",
         
     | 
| 1279 | 
         
            +
                  "lstrip": false,
         
     | 
| 1280 | 
         
            +
                  "normalized": false,
         
     | 
| 1281 | 
         
            +
                  "rstrip": false,
         
     | 
| 1282 | 
         
            +
                  "single_word": false,
         
     | 
| 1283 | 
         
            +
                  "special": true
         
     | 
| 1284 | 
         
            +
                },
         
     | 
| 1285 | 
         
            +
                "126240": {
         
     | 
| 1286 | 
         
            +
                  "content": "<|reserved_token_156|>",
         
     | 
| 1287 | 
         
            +
                  "lstrip": false,
         
     | 
| 1288 | 
         
            +
                  "normalized": false,
         
     | 
| 1289 | 
         
            +
                  "rstrip": false,
         
     | 
| 1290 | 
         
            +
                  "single_word": false,
         
     | 
| 1291 | 
         
            +
                  "special": true
         
     | 
| 1292 | 
         
            +
                },
         
     | 
| 1293 | 
         
            +
                "126241": {
         
     | 
| 1294 | 
         
            +
                  "content": "<|reserved_token_157|>",
         
     | 
| 1295 | 
         
            +
                  "lstrip": false,
         
     | 
| 1296 | 
         
            +
                  "normalized": false,
         
     | 
| 1297 | 
         
            +
                  "rstrip": false,
         
     | 
| 1298 | 
         
            +
                  "single_word": false,
         
     | 
| 1299 | 
         
            +
                  "special": true
         
     | 
| 1300 | 
         
            +
                },
         
     | 
| 1301 | 
         
            +
                "126242": {
         
     | 
| 1302 | 
         
            +
                  "content": "<|reserved_token_158|>",
         
     | 
| 1303 | 
         
            +
                  "lstrip": false,
         
     | 
| 1304 | 
         
            +
                  "normalized": false,
         
     | 
| 1305 | 
         
            +
                  "rstrip": false,
         
     | 
| 1306 | 
         
            +
                  "single_word": false,
         
     | 
| 1307 | 
         
            +
                  "special": true
         
     | 
| 1308 | 
         
            +
                },
         
     | 
| 1309 | 
         
            +
                "126243": {
         
     | 
| 1310 | 
         
            +
                  "content": "<|reserved_token_159|>",
         
     | 
| 1311 | 
         
            +
                  "lstrip": false,
         
     | 
| 1312 | 
         
            +
                  "normalized": false,
         
     | 
| 1313 | 
         
            +
                  "rstrip": false,
         
     | 
| 1314 | 
         
            +
                  "single_word": false,
         
     | 
| 1315 | 
         
            +
                  "special": true
         
     | 
| 1316 | 
         
            +
                },
         
     | 
| 1317 | 
         
            +
                "126244": {
         
     | 
| 1318 | 
         
            +
                  "content": "<|reserved_token_160|>",
         
     | 
| 1319 | 
         
            +
                  "lstrip": false,
         
     | 
| 1320 | 
         
            +
                  "normalized": false,
         
     | 
| 1321 | 
         
            +
                  "rstrip": false,
         
     | 
| 1322 | 
         
            +
                  "single_word": false,
         
     | 
| 1323 | 
         
            +
                  "special": true
         
     | 
| 1324 | 
         
            +
                },
         
     | 
| 1325 | 
         
            +
                "126245": {
         
     | 
| 1326 | 
         
            +
                  "content": "<|reserved_token_161|>",
         
     | 
| 1327 | 
         
            +
                  "lstrip": false,
         
     | 
| 1328 | 
         
            +
                  "normalized": false,
         
     | 
| 1329 | 
         
            +
                  "rstrip": false,
         
     | 
| 1330 | 
         
            +
                  "single_word": false,
         
     | 
| 1331 | 
         
            +
                  "special": true
         
     | 
| 1332 | 
         
            +
                },
         
     | 
| 1333 | 
         
            +
                "126246": {
         
     | 
| 1334 | 
         
            +
                  "content": "<|reserved_token_162|>",
         
     | 
| 1335 | 
         
            +
                  "lstrip": false,
         
     | 
| 1336 | 
         
            +
                  "normalized": false,
         
     | 
| 1337 | 
         
            +
                  "rstrip": false,
         
     | 
| 1338 | 
         
            +
                  "single_word": false,
         
     | 
| 1339 | 
         
            +
                  "special": true
         
     | 
| 1340 | 
         
            +
                },
         
     | 
| 1341 | 
         
            +
                "126247": {
         
     | 
| 1342 | 
         
            +
                  "content": "<|reserved_token_163|>",
         
     | 
| 1343 | 
         
            +
                  "lstrip": false,
         
     | 
| 1344 | 
         
            +
                  "normalized": false,
         
     | 
| 1345 | 
         
            +
                  "rstrip": false,
         
     | 
| 1346 | 
         
            +
                  "single_word": false,
         
     | 
| 1347 | 
         
            +
                  "special": true
         
     | 
| 1348 | 
         
            +
                },
         
     | 
| 1349 | 
         
            +
                "126248": {
         
     | 
| 1350 | 
         
            +
                  "content": "<|reserved_token_164|>",
         
     | 
| 1351 | 
         
            +
                  "lstrip": false,
         
     | 
| 1352 | 
         
            +
                  "normalized": false,
         
     | 
| 1353 | 
         
            +
                  "rstrip": false,
         
     | 
| 1354 | 
         
            +
                  "single_word": false,
         
     | 
| 1355 | 
         
            +
                  "special": true
         
     | 
| 1356 | 
         
            +
                },
         
     | 
| 1357 | 
         
            +
                "126249": {
         
     | 
| 1358 | 
         
            +
                  "content": "<|reserved_token_165|>",
         
     | 
| 1359 | 
         
            +
                  "lstrip": false,
         
     | 
| 1360 | 
         
            +
                  "normalized": false,
         
     | 
| 1361 | 
         
            +
                  "rstrip": false,
         
     | 
| 1362 | 
         
            +
                  "single_word": false,
         
     | 
| 1363 | 
         
            +
                  "special": true
         
     | 
| 1364 | 
         
            +
                },
         
     | 
| 1365 | 
         
            +
                "126250": {
         
     | 
| 1366 | 
         
            +
                  "content": "<|reserved_token_166|>",
         
     | 
| 1367 | 
         
            +
                  "lstrip": false,
         
     | 
| 1368 | 
         
            +
                  "normalized": false,
         
     | 
| 1369 | 
         
            +
                  "rstrip": false,
         
     | 
| 1370 | 
         
            +
                  "single_word": false,
         
     | 
| 1371 | 
         
            +
                  "special": true
         
     | 
| 1372 | 
         
            +
                },
         
     | 
| 1373 | 
         
            +
                "126251": {
         
     | 
| 1374 | 
         
            +
                  "content": "<|reserved_token_167|>",
         
     | 
| 1375 | 
         
            +
                  "lstrip": false,
         
     | 
| 1376 | 
         
            +
                  "normalized": false,
         
     | 
| 1377 | 
         
            +
                  "rstrip": false,
         
     | 
| 1378 | 
         
            +
                  "single_word": false,
         
     | 
| 1379 | 
         
            +
                  "special": true
         
     | 
| 1380 | 
         
            +
                },
         
     | 
| 1381 | 
         
            +
                "126252": {
         
     | 
| 1382 | 
         
            +
                  "content": "<|reserved_token_168|>",
         
     | 
| 1383 | 
         
            +
                  "lstrip": false,
         
     | 
| 1384 | 
         
            +
                  "normalized": false,
         
     | 
| 1385 | 
         
            +
                  "rstrip": false,
         
     | 
| 1386 | 
         
            +
                  "single_word": false,
         
     | 
| 1387 | 
         
            +
                  "special": true
         
     | 
| 1388 | 
         
            +
                },
         
     | 
| 1389 | 
         
            +
                "126253": {
         
     | 
| 1390 | 
         
            +
                  "content": "<|reserved_token_169|>",
         
     | 
| 1391 | 
         
            +
                  "lstrip": false,
         
     | 
| 1392 | 
         
            +
                  "normalized": false,
         
     | 
| 1393 | 
         
            +
                  "rstrip": false,
         
     | 
| 1394 | 
         
            +
                  "single_word": false,
         
     | 
| 1395 | 
         
            +
                  "special": true
         
     | 
| 1396 | 
         
            +
                },
         
     | 
| 1397 | 
         
            +
                "126254": {
         
     | 
| 1398 | 
         
            +
                  "content": "<|reserved_token_170|>",
         
     | 
| 1399 | 
         
            +
                  "lstrip": false,
         
     | 
| 1400 | 
         
            +
                  "normalized": false,
         
     | 
| 1401 | 
         
            +
                  "rstrip": false,
         
     | 
| 1402 | 
         
            +
                  "single_word": false,
         
     | 
| 1403 | 
         
            +
                  "special": true
         
     | 
| 1404 | 
         
            +
                },
         
     | 
| 1405 | 
         
            +
                "126255": {
         
     | 
| 1406 | 
         
            +
                  "content": "<|reserved_token_171|>",
         
     | 
| 1407 | 
         
            +
                  "lstrip": false,
         
     | 
| 1408 | 
         
            +
                  "normalized": false,
         
     | 
| 1409 | 
         
            +
                  "rstrip": false,
         
     | 
| 1410 | 
         
            +
                  "single_word": false,
         
     | 
| 1411 | 
         
            +
                  "special": true
         
     | 
| 1412 | 
         
            +
                },
         
     | 
| 1413 | 
         
            +
                "126256": {
         
     | 
| 1414 | 
         
            +
                  "content": "<|reserved_token_172|>",
         
     | 
| 1415 | 
         
            +
                  "lstrip": false,
         
     | 
| 1416 | 
         
            +
                  "normalized": false,
         
     | 
| 1417 | 
         
            +
                  "rstrip": false,
         
     | 
| 1418 | 
         
            +
                  "single_word": false,
         
     | 
| 1419 | 
         
            +
                  "special": true
         
     | 
| 1420 | 
         
            +
                },
         
     | 
| 1421 | 
         
            +
                "126257": {
         
     | 
| 1422 | 
         
            +
                  "content": "<|reserved_token_173|>",
         
     | 
| 1423 | 
         
            +
                  "lstrip": false,
         
     | 
| 1424 | 
         
            +
                  "normalized": false,
         
     | 
| 1425 | 
         
            +
                  "rstrip": false,
         
     | 
| 1426 | 
         
            +
                  "single_word": false,
         
     | 
| 1427 | 
         
            +
                  "special": true
         
     | 
| 1428 | 
         
            +
                },
         
     | 
| 1429 | 
         
            +
                "126258": {
         
     | 
| 1430 | 
         
            +
                  "content": "<|reserved_token_174|>",
         
     | 
| 1431 | 
         
            +
                  "lstrip": false,
         
     | 
| 1432 | 
         
            +
                  "normalized": false,
         
     | 
| 1433 | 
         
            +
                  "rstrip": false,
         
     | 
| 1434 | 
         
            +
                  "single_word": false,
         
     | 
| 1435 | 
         
            +
                  "special": true
         
     | 
| 1436 | 
         
            +
                },
         
     | 
| 1437 | 
         
            +
                "126259": {
         
     | 
| 1438 | 
         
            +
                  "content": "<|reserved_token_175|>",
         
     | 
| 1439 | 
         
            +
                  "lstrip": false,
         
     | 
| 1440 | 
         
            +
                  "normalized": false,
         
     | 
| 1441 | 
         
            +
                  "rstrip": false,
         
     | 
| 1442 | 
         
            +
                  "single_word": false,
         
     | 
| 1443 | 
         
            +
                  "special": true
         
     | 
| 1444 | 
         
            +
                },
         
     | 
| 1445 | 
         
            +
                "126260": {
         
     | 
| 1446 | 
         
            +
                  "content": "<|reserved_token_176|>",
         
     | 
| 1447 | 
         
            +
                  "lstrip": false,
         
     | 
| 1448 | 
         
            +
                  "normalized": false,
         
     | 
| 1449 | 
         
            +
                  "rstrip": false,
         
     | 
| 1450 | 
         
            +
                  "single_word": false,
         
     | 
| 1451 | 
         
            +
                  "special": true
         
     | 
| 1452 | 
         
            +
                },
         
     | 
| 1453 | 
         
            +
                "126261": {
         
     | 
| 1454 | 
         
            +
                  "content": "<|reserved_token_177|>",
         
     | 
| 1455 | 
         
            +
                  "lstrip": false,
         
     | 
| 1456 | 
         
            +
                  "normalized": false,
         
     | 
| 1457 | 
         
            +
                  "rstrip": false,
         
     | 
| 1458 | 
         
            +
                  "single_word": false,
         
     | 
| 1459 | 
         
            +
                  "special": true
         
     | 
| 1460 | 
         
            +
                },
         
     | 
| 1461 | 
         
            +
                "126262": {
         
     | 
| 1462 | 
         
            +
                  "content": "<|reserved_token_178|>",
         
     | 
| 1463 | 
         
            +
                  "lstrip": false,
         
     | 
| 1464 | 
         
            +
                  "normalized": false,
         
     | 
| 1465 | 
         
            +
                  "rstrip": false,
         
     | 
| 1466 | 
         
            +
                  "single_word": false,
         
     | 
| 1467 | 
         
            +
                  "special": true
         
     | 
| 1468 | 
         
            +
                },
         
     | 
| 1469 | 
         
            +
                "126263": {
         
     | 
| 1470 | 
         
            +
                  "content": "<|reserved_token_179|>",
         
     | 
| 1471 | 
         
            +
                  "lstrip": false,
         
     | 
| 1472 | 
         
            +
                  "normalized": false,
         
     | 
| 1473 | 
         
            +
                  "rstrip": false,
         
     | 
| 1474 | 
         
            +
                  "single_word": false,
         
     | 
| 1475 | 
         
            +
                  "special": true
         
     | 
| 1476 | 
         
            +
                },
         
     | 
| 1477 | 
         
            +
                "126264": {
         
     | 
| 1478 | 
         
            +
                  "content": "<|reserved_token_180|>",
         
     | 
| 1479 | 
         
            +
                  "lstrip": false,
         
     | 
| 1480 | 
         
            +
                  "normalized": false,
         
     | 
| 1481 | 
         
            +
                  "rstrip": false,
         
     | 
| 1482 | 
         
            +
                  "single_word": false,
         
     | 
| 1483 | 
         
            +
                  "special": true
         
     | 
| 1484 | 
         
            +
                },
         
     | 
| 1485 | 
         
            +
                "126265": {
         
     | 
| 1486 | 
         
            +
                  "content": "<|reserved_token_181|>",
         
     | 
| 1487 | 
         
            +
                  "lstrip": false,
         
     | 
| 1488 | 
         
            +
                  "normalized": false,
         
     | 
| 1489 | 
         
            +
                  "rstrip": false,
         
     | 
| 1490 | 
         
            +
                  "single_word": false,
         
     | 
| 1491 | 
         
            +
                  "special": true
         
     | 
| 1492 | 
         
            +
                },
         
     | 
| 1493 | 
         
            +
                "126266": {
         
     | 
| 1494 | 
         
            +
                  "content": "<|reserved_token_182|>",
         
     | 
| 1495 | 
         
            +
                  "lstrip": false,
         
     | 
| 1496 | 
         
            +
                  "normalized": false,
         
     | 
| 1497 | 
         
            +
                  "rstrip": false,
         
     | 
| 1498 | 
         
            +
                  "single_word": false,
         
     | 
| 1499 | 
         
            +
                  "special": true
         
     | 
| 1500 | 
         
            +
                },
         
     | 
| 1501 | 
         
            +
                "126267": {
         
     | 
| 1502 | 
         
            +
                  "content": "<|reserved_token_183|>",
         
     | 
| 1503 | 
         
            +
                  "lstrip": false,
         
     | 
| 1504 | 
         
            +
                  "normalized": false,
         
     | 
| 1505 | 
         
            +
                  "rstrip": false,
         
     | 
| 1506 | 
         
            +
                  "single_word": false,
         
     | 
| 1507 | 
         
            +
                  "special": true
         
     | 
| 1508 | 
         
            +
                },
         
     | 
| 1509 | 
         
            +
                "126268": {
         
     | 
| 1510 | 
         
            +
                  "content": "<|reserved_token_184|>",
         
     | 
| 1511 | 
         
            +
                  "lstrip": false,
         
     | 
| 1512 | 
         
            +
                  "normalized": false,
         
     | 
| 1513 | 
         
            +
                  "rstrip": false,
         
     | 
| 1514 | 
         
            +
                  "single_word": false,
         
     | 
| 1515 | 
         
            +
                  "special": true
         
     | 
| 1516 | 
         
            +
                },
         
     | 
| 1517 | 
         
            +
                "126269": {
         
     | 
| 1518 | 
         
            +
                  "content": "<|reserved_token_185|>",
         
     | 
| 1519 | 
         
            +
                  "lstrip": false,
         
     | 
| 1520 | 
         
            +
                  "normalized": false,
         
     | 
| 1521 | 
         
            +
                  "rstrip": false,
         
     | 
| 1522 | 
         
            +
                  "single_word": false,
         
     | 
| 1523 | 
         
            +
                  "special": true
         
     | 
| 1524 | 
         
            +
                },
         
     | 
| 1525 | 
         
            +
                "126270": {
         
     | 
| 1526 | 
         
            +
                  "content": "<|reserved_token_186|>",
         
     | 
| 1527 | 
         
            +
                  "lstrip": false,
         
     | 
| 1528 | 
         
            +
                  "normalized": false,
         
     | 
| 1529 | 
         
            +
                  "rstrip": false,
         
     | 
| 1530 | 
         
            +
                  "single_word": false,
         
     | 
| 1531 | 
         
            +
                  "special": true
         
     | 
| 1532 | 
         
            +
                },
         
     | 
| 1533 | 
         
            +
                "126271": {
         
     | 
| 1534 | 
         
            +
                  "content": "<|reserved_token_187|>",
         
     | 
| 1535 | 
         
            +
                  "lstrip": false,
         
     | 
| 1536 | 
         
            +
                  "normalized": false,
         
     | 
| 1537 | 
         
            +
                  "rstrip": false,
         
     | 
| 1538 | 
         
            +
                  "single_word": false,
         
     | 
| 1539 | 
         
            +
                  "special": true
         
     | 
| 1540 | 
         
            +
                },
         
     | 
| 1541 | 
         
            +
                "126272": {
         
     | 
| 1542 | 
         
            +
                  "content": "<|reserved_token_188|>",
         
     | 
| 1543 | 
         
            +
                  "lstrip": false,
         
     | 
| 1544 | 
         
            +
                  "normalized": false,
         
     | 
| 1545 | 
         
            +
                  "rstrip": false,
         
     | 
| 1546 | 
         
            +
                  "single_word": false,
         
     | 
| 1547 | 
         
            +
                  "special": true
         
     | 
| 1548 | 
         
            +
                },
         
     | 
| 1549 | 
         
            +
                "126273": {
         
     | 
| 1550 | 
         
            +
                  "content": "<|reserved_token_189|>",
         
     | 
| 1551 | 
         
            +
                  "lstrip": false,
         
     | 
| 1552 | 
         
            +
                  "normalized": false,
         
     | 
| 1553 | 
         
            +
                  "rstrip": false,
         
     | 
| 1554 | 
         
            +
                  "single_word": false,
         
     | 
| 1555 | 
         
            +
                  "special": true
         
     | 
| 1556 | 
         
            +
                },
         
     | 
| 1557 | 
         
            +
                "126274": {
         
     | 
| 1558 | 
         
            +
                  "content": "<|reserved_token_190|>",
         
     | 
| 1559 | 
         
            +
                  "lstrip": false,
         
     | 
| 1560 | 
         
            +
                  "normalized": false,
         
     | 
| 1561 | 
         
            +
                  "rstrip": false,
         
     | 
| 1562 | 
         
            +
                  "single_word": false,
         
     | 
| 1563 | 
         
            +
                  "special": true
         
     | 
| 1564 | 
         
            +
                },
         
     | 
| 1565 | 
         
            +
                "126275": {
         
     | 
| 1566 | 
         
            +
                  "content": "<|reserved_token_191|>",
         
     | 
| 1567 | 
         
            +
                  "lstrip": false,
         
     | 
| 1568 | 
         
            +
                  "normalized": false,
         
     | 
| 1569 | 
         
            +
                  "rstrip": false,
         
     | 
| 1570 | 
         
            +
                  "single_word": false,
         
     | 
| 1571 | 
         
            +
                  "special": true
         
     | 
| 1572 | 
         
            +
                },
         
     | 
| 1573 | 
         
            +
                "126276": {
         
     | 
| 1574 | 
         
            +
                  "content": "<|reserved_token_192|>",
         
     | 
| 1575 | 
         
            +
                  "lstrip": false,
         
     | 
| 1576 | 
         
            +
                  "normalized": false,
         
     | 
| 1577 | 
         
            +
                  "rstrip": false,
         
     | 
| 1578 | 
         
            +
                  "single_word": false,
         
     | 
| 1579 | 
         
            +
                  "special": true
         
     | 
| 1580 | 
         
            +
                },
         
     | 
| 1581 | 
         
            +
                "126277": {
         
     | 
| 1582 | 
         
            +
                  "content": "<|reserved_token_193|>",
         
     | 
| 1583 | 
         
            +
                  "lstrip": false,
         
     | 
| 1584 | 
         
            +
                  "normalized": false,
         
     | 
| 1585 | 
         
            +
                  "rstrip": false,
         
     | 
| 1586 | 
         
            +
                  "single_word": false,
         
     | 
| 1587 | 
         
            +
                  "special": true
         
     | 
| 1588 | 
         
            +
                },
         
     | 
| 1589 | 
         
            +
                "126278": {
         
     | 
| 1590 | 
         
            +
                  "content": "<|reserved_token_194|>",
         
     | 
| 1591 | 
         
            +
                  "lstrip": false,
         
     | 
| 1592 | 
         
            +
                  "normalized": false,
         
     | 
| 1593 | 
         
            +
                  "rstrip": false,
         
     | 
| 1594 | 
         
            +
                  "single_word": false,
         
     | 
| 1595 | 
         
            +
                  "special": true
         
     | 
| 1596 | 
         
            +
                },
         
     | 
| 1597 | 
         
            +
                "126279": {
         
     | 
| 1598 | 
         
            +
                  "content": "<|reserved_token_195|>",
         
     | 
| 1599 | 
         
            +
                  "lstrip": false,
         
     | 
| 1600 | 
         
            +
                  "normalized": false,
         
     | 
| 1601 | 
         
            +
                  "rstrip": false,
         
     | 
| 1602 | 
         
            +
                  "single_word": false,
         
     | 
| 1603 | 
         
            +
                  "special": true
         
     | 
| 1604 | 
         
            +
                },
         
     | 
| 1605 | 
         
            +
                "126280": {
         
     | 
| 1606 | 
         
            +
                  "content": "<|reserved_token_196|>",
         
     | 
| 1607 | 
         
            +
                  "lstrip": false,
         
     | 
| 1608 | 
         
            +
                  "normalized": false,
         
     | 
| 1609 | 
         
            +
                  "rstrip": false,
         
     | 
| 1610 | 
         
            +
                  "single_word": false,
         
     | 
| 1611 | 
         
            +
                  "special": true
         
     | 
| 1612 | 
         
            +
                },
         
     | 
| 1613 | 
         
            +
                "126281": {
         
     | 
| 1614 | 
         
            +
                  "content": "<|reserved_token_197|>",
         
     | 
| 1615 | 
         
            +
                  "lstrip": false,
         
     | 
| 1616 | 
         
            +
                  "normalized": false,
         
     | 
| 1617 | 
         
            +
                  "rstrip": false,
         
     | 
| 1618 | 
         
            +
                  "single_word": false,
         
     | 
| 1619 | 
         
            +
                  "special": true
         
     | 
| 1620 | 
         
            +
                },
         
     | 
| 1621 | 
         
            +
                "126282": {
         
     | 
| 1622 | 
         
            +
                  "content": "<|reserved_token_198|>",
         
     | 
| 1623 | 
         
            +
                  "lstrip": false,
         
     | 
| 1624 | 
         
            +
                  "normalized": false,
         
     | 
| 1625 | 
         
            +
                  "rstrip": false,
         
     | 
| 1626 | 
         
            +
                  "single_word": false,
         
     | 
| 1627 | 
         
            +
                  "special": true
         
     | 
| 1628 | 
         
            +
                },
         
     | 
| 1629 | 
         
            +
                "126283": {
         
     | 
| 1630 | 
         
            +
                  "content": "<|reserved_token_199|>",
         
     | 
| 1631 | 
         
            +
                  "lstrip": false,
         
     | 
| 1632 | 
         
            +
                  "normalized": false,
         
     | 
| 1633 | 
         
            +
                  "rstrip": false,
         
     | 
| 1634 | 
         
            +
                  "single_word": false,
         
     | 
| 1635 | 
         
            +
                  "special": true
         
     | 
| 1636 | 
         
            +
                },
         
     | 
| 1637 | 
         
            +
                "126284": {
         
     | 
| 1638 | 
         
            +
                  "content": "<|reserved_token_200|>",
         
     | 
| 1639 | 
         
            +
                  "lstrip": false,
         
     | 
| 1640 | 
         
            +
                  "normalized": false,
         
     | 
| 1641 | 
         
            +
                  "rstrip": false,
         
     | 
| 1642 | 
         
            +
                  "single_word": false,
         
     | 
| 1643 | 
         
            +
                  "special": true
         
     | 
| 1644 | 
         
            +
                },
         
     | 
| 1645 | 
         
            +
                "126285": {
         
     | 
| 1646 | 
         
            +
                  "content": "<|reserved_token_201|>",
         
     | 
| 1647 | 
         
            +
                  "lstrip": false,
         
     | 
| 1648 | 
         
            +
                  "normalized": false,
         
     | 
| 1649 | 
         
            +
                  "rstrip": false,
         
     | 
| 1650 | 
         
            +
                  "single_word": false,
         
     | 
| 1651 | 
         
            +
                  "special": true
         
     | 
| 1652 | 
         
            +
                },
         
     | 
| 1653 | 
         
            +
                "126286": {
         
     | 
| 1654 | 
         
            +
                  "content": "<|reserved_token_202|>",
         
     | 
| 1655 | 
         
            +
                  "lstrip": false,
         
     | 
| 1656 | 
         
            +
                  "normalized": false,
         
     | 
| 1657 | 
         
            +
                  "rstrip": false,
         
     | 
| 1658 | 
         
            +
                  "single_word": false,
         
     | 
| 1659 | 
         
            +
                  "special": true
         
     | 
| 1660 | 
         
            +
                },
         
     | 
| 1661 | 
         
            +
                "126287": {
         
     | 
| 1662 | 
         
            +
                  "content": "<|reserved_token_203|>",
         
     | 
| 1663 | 
         
            +
                  "lstrip": false,
         
     | 
| 1664 | 
         
            +
                  "normalized": false,
         
     | 
| 1665 | 
         
            +
                  "rstrip": false,
         
     | 
| 1666 | 
         
            +
                  "single_word": false,
         
     | 
| 1667 | 
         
            +
                  "special": true
         
     | 
| 1668 | 
         
            +
                },
         
     | 
| 1669 | 
         
            +
                "126288": {
         
     | 
| 1670 | 
         
            +
                  "content": "<|reserved_token_204|>",
         
     | 
| 1671 | 
         
            +
                  "lstrip": false,
         
     | 
| 1672 | 
         
            +
                  "normalized": false,
         
     | 
| 1673 | 
         
            +
                  "rstrip": false,
         
     | 
| 1674 | 
         
            +
                  "single_word": false,
         
     | 
| 1675 | 
         
            +
                  "special": true
         
     | 
| 1676 | 
         
            +
                },
         
     | 
| 1677 | 
         
            +
                "126289": {
         
     | 
| 1678 | 
         
            +
                  "content": "<|reserved_token_205|>",
         
     | 
| 1679 | 
         
            +
                  "lstrip": false,
         
     | 
| 1680 | 
         
            +
                  "normalized": false,
         
     | 
| 1681 | 
         
            +
                  "rstrip": false,
         
     | 
| 1682 | 
         
            +
                  "single_word": false,
         
     | 
| 1683 | 
         
            +
                  "special": true
         
     | 
| 1684 | 
         
            +
                },
         
     | 
| 1685 | 
         
            +
                "126290": {
         
     | 
| 1686 | 
         
            +
                  "content": "<|reserved_token_206|>",
         
     | 
| 1687 | 
         
            +
                  "lstrip": false,
         
     | 
| 1688 | 
         
            +
                  "normalized": false,
         
     | 
| 1689 | 
         
            +
                  "rstrip": false,
         
     | 
| 1690 | 
         
            +
                  "single_word": false,
         
     | 
| 1691 | 
         
            +
                  "special": true
         
     | 
| 1692 | 
         
            +
                },
         
     | 
| 1693 | 
         
            +
                "126291": {
         
     | 
| 1694 | 
         
            +
                  "content": "<|reserved_token_207|>",
         
     | 
| 1695 | 
         
            +
                  "lstrip": false,
         
     | 
| 1696 | 
         
            +
                  "normalized": false,
         
     | 
| 1697 | 
         
            +
                  "rstrip": false,
         
     | 
| 1698 | 
         
            +
                  "single_word": false,
         
     | 
| 1699 | 
         
            +
                  "special": true
         
     | 
| 1700 | 
         
            +
                },
         
     | 
| 1701 | 
         
            +
                "126292": {
         
     | 
| 1702 | 
         
            +
                  "content": "<|reserved_token_208|>",
         
     | 
| 1703 | 
         
            +
                  "lstrip": false,
         
     | 
| 1704 | 
         
            +
                  "normalized": false,
         
     | 
| 1705 | 
         
            +
                  "rstrip": false,
         
     | 
| 1706 | 
         
            +
                  "single_word": false,
         
     | 
| 1707 | 
         
            +
                  "special": true
         
     | 
| 1708 | 
         
            +
                },
         
     | 
| 1709 | 
         
            +
                "126293": {
         
     | 
| 1710 | 
         
            +
                  "content": "<|reserved_token_209|>",
         
     | 
| 1711 | 
         
            +
                  "lstrip": false,
         
     | 
| 1712 | 
         
            +
                  "normalized": false,
         
     | 
| 1713 | 
         
            +
                  "rstrip": false,
         
     | 
| 1714 | 
         
            +
                  "single_word": false,
         
     | 
| 1715 | 
         
            +
                  "special": true
         
     | 
| 1716 | 
         
            +
                },
         
     | 
| 1717 | 
         
            +
                "126294": {
         
     | 
| 1718 | 
         
            +
                  "content": "<|reserved_token_210|>",
         
     | 
| 1719 | 
         
            +
                  "lstrip": false,
         
     | 
| 1720 | 
         
            +
                  "normalized": false,
         
     | 
| 1721 | 
         
            +
                  "rstrip": false,
         
     | 
| 1722 | 
         
            +
                  "single_word": false,
         
     | 
| 1723 | 
         
            +
                  "special": true
         
     | 
| 1724 | 
         
            +
                },
         
     | 
| 1725 | 
         
            +
                "126295": {
         
     | 
| 1726 | 
         
            +
                  "content": "<|reserved_token_211|>",
         
     | 
| 1727 | 
         
            +
                  "lstrip": false,
         
     | 
| 1728 | 
         
            +
                  "normalized": false,
         
     | 
| 1729 | 
         
            +
                  "rstrip": false,
         
     | 
| 1730 | 
         
            +
                  "single_word": false,
         
     | 
| 1731 | 
         
            +
                  "special": true
         
     | 
| 1732 | 
         
            +
                },
         
     | 
| 1733 | 
         
            +
                "126296": {
         
     | 
| 1734 | 
         
            +
                  "content": "<|reserved_token_212|>",
         
     | 
| 1735 | 
         
            +
                  "lstrip": false,
         
     | 
| 1736 | 
         
            +
                  "normalized": false,
         
     | 
| 1737 | 
         
            +
                  "rstrip": false,
         
     | 
| 1738 | 
         
            +
                  "single_word": false,
         
     | 
| 1739 | 
         
            +
                  "special": true
         
     | 
| 1740 | 
         
            +
                },
         
     | 
| 1741 | 
         
            +
                "126297": {
         
     | 
| 1742 | 
         
            +
                  "content": "<|reserved_token_213|>",
         
     | 
| 1743 | 
         
            +
                  "lstrip": false,
         
     | 
| 1744 | 
         
            +
                  "normalized": false,
         
     | 
| 1745 | 
         
            +
                  "rstrip": false,
         
     | 
| 1746 | 
         
            +
                  "single_word": false,
         
     | 
| 1747 | 
         
            +
                  "special": true
         
     | 
| 1748 | 
         
            +
                },
         
     | 
| 1749 | 
         
            +
                "126298": {
         
     | 
| 1750 | 
         
            +
                  "content": "<|reserved_token_214|>",
         
     | 
| 1751 | 
         
            +
                  "lstrip": false,
         
     | 
| 1752 | 
         
            +
                  "normalized": false,
         
     | 
| 1753 | 
         
            +
                  "rstrip": false,
         
     | 
| 1754 | 
         
            +
                  "single_word": false,
         
     | 
| 1755 | 
         
            +
                  "special": true
         
     | 
| 1756 | 
         
            +
                },
         
     | 
| 1757 | 
         
            +
                "126299": {
         
     | 
| 1758 | 
         
            +
                  "content": "<|reserved_token_215|>",
         
     | 
| 1759 | 
         
            +
                  "lstrip": false,
         
     | 
| 1760 | 
         
            +
                  "normalized": false,
         
     | 
| 1761 | 
         
            +
                  "rstrip": false,
         
     | 
| 1762 | 
         
            +
                  "single_word": false,
         
     | 
| 1763 | 
         
            +
                  "special": true
         
     | 
| 1764 | 
         
            +
                },
         
     | 
| 1765 | 
         
            +
                "126300": {
         
     | 
| 1766 | 
         
            +
                  "content": "<|reserved_token_216|>",
         
     | 
| 1767 | 
         
            +
                  "lstrip": false,
         
     | 
| 1768 | 
         
            +
                  "normalized": false,
         
     | 
| 1769 | 
         
            +
                  "rstrip": false,
         
     | 
| 1770 | 
         
            +
                  "single_word": false,
         
     | 
| 1771 | 
         
            +
                  "special": true
         
     | 
| 1772 | 
         
            +
                },
         
     | 
| 1773 | 
         
            +
                "126301": {
         
     | 
| 1774 | 
         
            +
                  "content": "<|reserved_token_217|>",
         
     | 
| 1775 | 
         
            +
                  "lstrip": false,
         
     | 
| 1776 | 
         
            +
                  "normalized": false,
         
     | 
| 1777 | 
         
            +
                  "rstrip": false,
         
     | 
| 1778 | 
         
            +
                  "single_word": false,
         
     | 
| 1779 | 
         
            +
                  "special": true
         
     | 
| 1780 | 
         
            +
                },
         
     | 
| 1781 | 
         
            +
                "126302": {
         
     | 
| 1782 | 
         
            +
                  "content": "<|reserved_token_218|>",
         
     | 
| 1783 | 
         
            +
                  "lstrip": false,
         
     | 
| 1784 | 
         
            +
                  "normalized": false,
         
     | 
| 1785 | 
         
            +
                  "rstrip": false,
         
     | 
| 1786 | 
         
            +
                  "single_word": false,
         
     | 
| 1787 | 
         
            +
                  "special": true
         
     | 
| 1788 | 
         
            +
                },
         
     | 
| 1789 | 
         
            +
                "126303": {
         
     | 
| 1790 | 
         
            +
                  "content": "<|reserved_token_219|>",
         
     | 
| 1791 | 
         
            +
                  "lstrip": false,
         
     | 
| 1792 | 
         
            +
                  "normalized": false,
         
     | 
| 1793 | 
         
            +
                  "rstrip": false,
         
     | 
| 1794 | 
         
            +
                  "single_word": false,
         
     | 
| 1795 | 
         
            +
                  "special": true
         
     | 
| 1796 | 
         
            +
                },
         
     | 
| 1797 | 
         
            +
                "126304": {
         
     | 
| 1798 | 
         
            +
                  "content": "<|reserved_token_220|>",
         
+                  "lstrip": false,
+                  "normalized": false,
+                  "rstrip": false,
+                  "single_word": false,
+                  "special": true
+                },
+                "126305": { "content": "<|reserved_token_221|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126306": { "content": "<|reserved_token_222|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126307": { "content": "<|reserved_token_223|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126308": { "content": "<|reserved_token_224|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126309": { "content": "<|reserved_token_225|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126310": { "content": "<|reserved_token_226|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126311": { "content": "<|reserved_token_227|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126312": { "content": "<|reserved_token_228|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126313": { "content": "<|reserved_token_229|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126314": { "content": "<|reserved_token_230|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126315": { "content": "<|reserved_token_231|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126316": { "content": "<|reserved_token_232|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126317": { "content": "<|reserved_token_233|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126318": { "content": "<|reserved_token_234|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126319": { "content": "<|reserved_token_235|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126320": { "content": "<|reserved_token_236|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126321": { "content": "<|reserved_token_237|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126322": { "content": "<|reserved_token_238|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126323": { "content": "<|reserved_token_239|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126324": { "content": "<|reserved_token_240|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126325": { "content": "<|reserved_token_241|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126326": { "content": "<|reserved_token_242|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126327": { "content": "<|reserved_token_243|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126328": { "content": "<|reserved_token_244|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126329": { "content": "<|reserved_token_245|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126330": { "content": "<|reserved_token_246|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126331": { "content": "<|reserved_token_247|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126332": { "content": "<|reserved_token_248|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126333": { "content": "<|reserved_token_249|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126334": { "content": "<|reserved_token_250|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126335": { "content": "<|reserved_token_251|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126336": { "content": "<|mdm_mask|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126337": { "content": "<|reserved_token_253|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126338": { "content": "<|reserved_token_254|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126339": { "content": "<|reserved_token_255|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126340": { "content": "<role>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126341": { "content": "</role>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126342": { "content": "<|arithmetic_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126343": { "content": "<|arithmetic_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126344": { "content": "<|number_start|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126345": { "content": "<|number_end|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126346": { "content": "<|start_header_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126347": { "content": "<|end_header_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true },
+                "126348": { "content": "<|eot_id|>", "lstrip": false, "normalized": false, "rstrip": false, "single_word": false, "special": true }
+              },
+              "additional_special_tokens": [
+                "<role>",
+                "</role>",
+                "<|arithmetic_start|>",
+                "<|arithmetic_end|>",
+                "<|number_start|>",
+                "<|number_end|>"
+              ],
+              "bos_token": "<|startoftext|>",
+              "chat_template": "{% set loop_messages = messages %}{% for message in loop_messages %}{% set content = '<|start_header_id|>' + message['role'] + '<|end_header_id|>\n\n'+ message['content'] | trim + '<|eot_id|>' %}{% if loop.index0 == 0 %}{% set content = bos_token + content %}{% endif %}{{ content }}{% endfor %}{{ '<|start_header_id|>assistant<|end_header_id|>\n\n' }}",
+              "clean_up_tokenization_spaces": false,
+              "cls_token": "[CLS]",
+              "eos_token": "<|endoftext|>",
+              "extra_special_tokens": {},
+              "fast_tokenizer": true,
+              "gmask_token": "[gMASK]",
+              "merges_file": null,
+              "model_input_names": [
+                "input_ids",
+                "attention_mask"
+              ],
+              "model_max_length": 2048,
+              "pad_token": "<|endoftext|>",
+              "padding_side": "right",
+              "tokenizer_class": "PreTrainedTokenizer",
+              "trust_remote_code": true
+            }
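
A minimal sketch of how the tokenizer configuration above would typically be exercised once the repository is downloaded. The repository path below is a placeholder, and the printed ID simply reflects the added_tokens_decoder mapping in the file (the masked-diffusion mask token <|mdm_mask|> at 126336); this is an illustration, not part of the uploaded files.

    # Hedged sketch (not part of this upload): load the tokenizer described by
    # tokenizer_config.json and check the special-token mapping defined above.
    # "path/to/this-repo" is a placeholder; trust_remote_code=True is needed
    # because tokenizer_class is a custom PreTrainedTokenizer shipped in-repo.
    from transformers import AutoTokenizer

    tok = AutoTokenizer.from_pretrained("path/to/this-repo", trust_remote_code=True)

    # Mask token used by the masked-diffusion objective, per added_tokens_decoder.
    print(tok.convert_tokens_to_ids("<|mdm_mask|>"))  # expected: 126336

    # Render the chat template: each turn is wrapped in
    # <|start_header_id|>role<|end_header_id|> ... <|eot_id|>, the first turn is
    # prefixed with bos_token, and an open assistant header is appended at the end.
    messages = [{"role": "user", "content": "Describe the image."}]
    print(tok.apply_chat_template(messages, tokenize=False))
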
    	
trainer_state.json ADDED
@@ -0,0 +1,2309 @@
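
The body of trainer_state.json (2,309 added lines) is not rendered in this view. Assuming it follows the standard Hugging Face Trainer layout, with a log_history list of per-step logging dicts, a small sketch for pulling out the logged loss curve might look like the following; the key names are assumptions, not taken from the file itself.

    # Hedged sketch: assumes the standard Trainer "log_history" layout
    # (a list of dicts carrying "step" and "loss"); the actual file content
    # is not shown in this diff, so these keys are assumptions.
    import json

    with open("trainer_state.json") as f:
        state = json.load(f)

    points = [(e["step"], e["loss"]) for e in state.get("log_history", []) if "loss" in e]
    print(f"{len(points)} logged loss values")
    if points:
        print("last:", points[-1])
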
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
|
| 
         | 
| 
         | 
|
+{
+  "best_global_step": null,
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 5.0,
+  "eval_steps": 500,
+  "global_step": 325,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.01556420233463035,
+      "grad_norm": 32.460037697908916,
+      "learning_rate": 1.25e-06,
+      "loss": 2.0427,
+      "step": 1
+    },
+    {
+      "epoch": 0.0311284046692607,
+      "grad_norm": 14.314919163607806,
+      "learning_rate": 2.5e-06,
+      "loss": 1.8704,
+      "step": 2
+    },
+    {
+      "epoch": 0.04669260700389105,
+      "grad_norm": 27.137073787955334,
+      "learning_rate": 3.7500000000000005e-06,
+      "loss": 2.009,
+      "step": 3
+    },
+    {
+      "epoch": 0.0622568093385214,
+      "grad_norm": 22.855686347208184,
+      "learning_rate": 5e-06,
+      "loss": 2.2908,
+      "step": 4
+    },
+    {
+      "epoch": 0.07782101167315175,
+      "grad_norm": 17.021052812960836,
+      "learning_rate": 6.25e-06,
+      "loss": 1.6062,
+      "step": 5
+    },
+    {
+      "epoch": 0.0933852140077821,
+      "grad_norm": 8.04115371265731,
+      "learning_rate": 7.500000000000001e-06,
+      "loss": 1.6328,
+      "step": 6
+    },
+    {
+      "epoch": 0.10894941634241245,
+      "grad_norm": 8.684988862656194,
+      "learning_rate": 8.750000000000001e-06,
+      "loss": 1.5791,
+      "step": 7
+    },
+    {
+      "epoch": 0.1245136186770428,
+      "grad_norm": 10.489704115967008,
+      "learning_rate": 1e-05,
+      "loss": 1.437,
+      "step": 8
+    },
+    {
+      "epoch": 0.14007782101167315,
+      "grad_norm": 9.345852547867393,
+      "learning_rate": 1.125e-05,
+      "loss": 1.4409,
+      "step": 9
+    },
+    {
+      "epoch": 0.1556420233463035,
+      "grad_norm": 12.566418385513145,
+      "learning_rate": 1.25e-05,
+      "loss": 1.9836,
+      "step": 10
+    },
+    {
+      "epoch": 0.17120622568093385,
+      "grad_norm": 9.458631541156663,
+      "learning_rate": 1.375e-05,
+      "loss": 1.7625,
+      "step": 11
+    },
+    {
+      "epoch": 0.1867704280155642,
+      "grad_norm": 4.775804904067164,
+      "learning_rate": 1.5000000000000002e-05,
+      "loss": 1.3358,
+      "step": 12
+    },
+    {
+      "epoch": 0.20233463035019456,
+      "grad_norm": 7.063116954030999,
+      "learning_rate": 1.6250000000000002e-05,
+      "loss": 1.5127,
+      "step": 13
+    },
+    {
+      "epoch": 0.2178988326848249,
+      "grad_norm": 20.081862895733657,
+      "learning_rate": 1.7500000000000002e-05,
+      "loss": 1.6345,
+      "step": 14
+    },
+    {
+      "epoch": 0.23346303501945526,
+      "grad_norm": 7.4857791672008664,
+      "learning_rate": 1.8750000000000002e-05,
+      "loss": 1.6543,
+      "step": 15
+    },
+    {
+      "epoch": 0.2490272373540856,
+      "grad_norm": 5.930935178930693,
+      "learning_rate": 2e-05,
+      "loss": 1.3915,
+      "step": 16
+    },
+    {
+      "epoch": 0.26459143968871596,
+      "grad_norm": 4.891425231471704,
+      "learning_rate": 1.9999819470801393e-05,
+      "loss": 1.4524,
+      "step": 17
+    },
+    {
+      "epoch": 0.2801556420233463,
+      "grad_norm": 8.526785651246044,
+      "learning_rate": 1.999927789044796e-05,
+      "loss": 1.3936,
+      "step": 18
+    },
+    {
+      "epoch": 0.29571984435797666,
+      "grad_norm": 8.794855084727713,
+      "learning_rate": 1.9998375280666606e-05,
+      "loss": 1.4946,
+      "step": 19
+    },
+    {
+      "epoch": 0.311284046692607,
+      "grad_norm": 5.5605631475331,
+      "learning_rate": 1.9997111677667875e-05,
+      "loss": 1.5168,
+      "step": 20
+    },
+    {
+      "epoch": 0.32684824902723736,
+      "grad_norm": 6.109413863257978,
+      "learning_rate": 1.999548713214448e-05,
+      "loss": 1.3959,
+      "step": 21
+    },
+    {
+      "epoch": 0.3424124513618677,
+      "grad_norm": 8.216498243755654,
+      "learning_rate": 1.9993501709269297e-05,
+      "loss": 1.5457,
+      "step": 22
+    },
+    {
+      "epoch": 0.35797665369649806,
+      "grad_norm": 4.575471414036173,
+      "learning_rate": 1.9991155488692714e-05,
+      "loss": 1.3057,
+      "step": 23
+    },
+    {
+      "epoch": 0.3735408560311284,
+      "grad_norm": 7.713781786096019,
+      "learning_rate": 1.9988448564539475e-05,
+      "loss": 1.52,
+      "step": 24
+    },
+    {
+      "epoch": 0.38910505836575876,
+      "grad_norm": 9.333764622766337,
+      "learning_rate": 1.998538104540488e-05,
+      "loss": 1.2957,
+      "step": 25
+    },
+    {
+      "epoch": 0.4046692607003891,
+      "grad_norm": 6.153987069403218,
+      "learning_rate": 1.9981953054350436e-05,
+      "loss": 1.2496,
+      "step": 26
+    },
+    {
+      "epoch": 0.42023346303501946,
+      "grad_norm": 7.204269939964425,
+      "learning_rate": 1.997816472889891e-05,
+      "loss": 1.2556,
+      "step": 27
+    },
+    {
+      "epoch": 0.4357976653696498,
+      "grad_norm": 9.356556657699878,
+      "learning_rate": 1.9974016221028825e-05,
+      "loss": 1.3693,
+      "step": 28
+    },
+    {
+      "epoch": 0.45136186770428016,
+      "grad_norm": 18.674995074165327,
+      "learning_rate": 1.9969507697168372e-05,
+      "loss": 1.3901,
+      "step": 29
+    },
+    {
+      "epoch": 0.4669260700389105,
+      "grad_norm": 8.743718086448856,
+      "learning_rate": 1.996463933818869e-05,
+      "loss": 1.4637,
+      "step": 30
+    },
+    {
+      "epoch": 0.48249027237354086,
+      "grad_norm": 6.036142649788235,
+      "learning_rate": 1.9959411339396667e-05,
+      "loss": 1.446,
+      "step": 31
+    },
+    {
+      "epoch": 0.4980544747081712,
+      "grad_norm": 11.118098752442311,
+      "learning_rate": 1.9953823910527057e-05,
+      "loss": 1.2325,
+      "step": 32
+    },
+    {
+      "epoch": 0.5136186770428015,
+      "grad_norm": 8.532563003893063,
+      "learning_rate": 1.9947877275734103e-05,
+      "loss": 1.4182,
+      "step": 33
+    },
+    {
+      "epoch": 0.5291828793774319,
+      "grad_norm": 8.44093940432976,
+      "learning_rate": 1.9941571673582517e-05,
+      "loss": 1.519,
+      "step": 34
+    },
+    {
+      "epoch": 0.5447470817120622,
+      "grad_norm": 10.678108587266333,
+      "learning_rate": 1.9934907357037913e-05,
+      "loss": 1.4811,
+      "step": 35
+    },
+    {
+      "epoch": 0.5603112840466926,
+      "grad_norm": 12.279223122144039,
+      "learning_rate": 1.992788459345669e-05,
+      "loss": 1.5383,
+      "step": 36
+    },
+    {
+      "epoch": 0.5758754863813229,
+      "grad_norm": 9.6225514739576,
+      "learning_rate": 1.9920503664575252e-05,
+      "loss": 1.634,
+      "step": 37
+    },
+    {
+      "epoch": 0.5914396887159533,
+      "grad_norm": 6.816063682489534,
+      "learning_rate": 1.991276486649876e-05,
+      "loss": 1.4922,
+      "step": 38
+    },
+    {
+      "epoch": 0.6070038910505836,
+      "grad_norm": 8.462345745450083,
+      "learning_rate": 1.990466850968921e-05,
+      "loss": 1.4553,
+      "step": 39
+    },
+    {
+      "epoch": 0.622568093385214,
+      "grad_norm": 7.511393821964174,
+      "learning_rate": 1.9896214918953003e-05,
+      "loss": 1.613,
+      "step": 40
+    },
+    {
+      "epoch": 0.6381322957198443,
+      "grad_norm": 12.76100865490119,
+      "learning_rate": 1.9887404433427917e-05,
+      "loss": 1.3829,
+      "step": 41
+    },
+    {
+      "epoch": 0.6536964980544747,
+      "grad_norm": 9.953708209183798,
+      "learning_rate": 1.9878237406569476e-05,
+      "loss": 1.2018,
+      "step": 42
+    },
+    {
+      "epoch": 0.669260700389105,
+      "grad_norm": 17.646408078968005,
+      "learning_rate": 1.9868714206136787e-05,
+      "loss": 1.2234,
+      "step": 43
+    },
+    {
+      "epoch": 0.6848249027237354,
+      "grad_norm": 6.687546724266615,
+      "learning_rate": 1.985883521417781e-05,
+      "loss": 1.3619,
+      "step": 44
+    },
+    {
+      "epoch": 0.7003891050583657,
+      "grad_norm": 7.265965326318142,
+      "learning_rate": 1.9848600827013976e-05,
+      "loss": 1.3766,
+      "step": 45
+    },
+    {
+      "epoch": 0.7159533073929961,
+      "grad_norm": 10.47550352191459,
+      "learning_rate": 1.983801145522434e-05,
+      "loss": 1.4978,
+      "step": 46
+    },
+    {
+      "epoch": 0.7315175097276264,
+      "grad_norm": 7.719719591865551,
+      "learning_rate": 1.9827067523629075e-05,
+      "loss": 1.5026,
+      "step": 47
+    },
+    {
+      "epoch": 0.7470817120622568,
+      "grad_norm": 6.653131519406193,
+      "learning_rate": 1.981576947127245e-05,
+      "loss": 1.3853,
+      "step": 48
+    },
+    {
+      "epoch": 0.7626459143968871,
+      "grad_norm": 8.90485792562694,
+      "learning_rate": 1.9804117751405213e-05,
+      "loss": 1.4688,
+      "step": 49
+    },
+    {
+      "epoch": 0.7782101167315175,
+      "grad_norm": 7.409558877721469,
+      "learning_rate": 1.9792112831466385e-05,
+      "loss": 1.2318,
+      "step": 50
+    },
+    {
+      "epoch": 0.7937743190661478,
+      "grad_norm": 10.548282670671158,
+      "learning_rate": 1.9779755193064545e-05,
+      "loss": 1.1853,
+      "step": 51
+    },
+    {
+      "epoch": 0.8093385214007782,
+      "grad_norm": 8.195819265190133,
+      "learning_rate": 1.9767045331958486e-05,
+      "loss": 1.3867,
+      "step": 52
+    },
+    {
+      "epoch": 0.8249027237354085,
+      "grad_norm": 7.195118313650143,
+      "learning_rate": 1.9753983758037324e-05,
+      "loss": 1.3728,
+      "step": 53
+    },
+    {
+      "epoch": 0.8404669260700389,
+      "grad_norm": 6.865553942117972,
+      "learning_rate": 1.9740570995300054e-05,
+      "loss": 1.4971,
+      "step": 54
+    },
+    {
+      "epoch": 0.8560311284046692,
+      "grad_norm": 13.711210401882385,
+      "learning_rate": 1.9726807581834522e-05,
+      "loss": 1.5261,
+      "step": 55
+    },
+    {
+      "epoch": 0.8715953307392996,
+      "grad_norm": 5.9709078674205776,
+      "learning_rate": 1.971269406979584e-05,
+      "loss": 1.1547,
+      "step": 56
+    },
+    {
+      "epoch": 0.8871595330739299,
+      "grad_norm": 7.305358527853627,
+      "learning_rate": 1.9698231025384234e-05,
+      "loss": 1.2522,
+      "step": 57
+    },
+    {
+      "epoch": 0.9027237354085603,
+      "grad_norm": 7.144056189221613,
+      "learning_rate": 1.9683419028822333e-05,
+      "loss": 1.2921,
+      "step": 58
+    },
+    {
+      "epoch": 0.9182879377431906,
+      "grad_norm": 6.9606269877032005,
+      "learning_rate": 1.9668258674331882e-05,
+      "loss": 1.2708,
+      "step": 59
+    },
+    {
+      "epoch": 0.933852140077821,
+      "grad_norm": 9.722878510396836,
+      "learning_rate": 1.9652750570109914e-05,
+      "loss": 1.4427,
+      "step": 60
+    },
+    {
+      "epoch": 0.9494163424124513,
+      "grad_norm": 9.963909818863343,
+      "learning_rate": 1.9636895338304347e-05,
+      "loss": 1.3381,
+      "step": 61
+    },
+    {
+      "epoch": 0.9649805447470817,
+      "grad_norm": 6.446954560778193,
+      "learning_rate": 1.9620693614989024e-05,
+      "loss": 1.4269,
+      "step": 62
+    },
+    {
+      "epoch": 0.980544747081712,
+      "grad_norm": 7.603247343231994,
+      "learning_rate": 1.9604146050138194e-05,
+      "loss": 1.4298,
+      "step": 63
+    },
+    {
+      "epoch": 0.9961089494163424,
+      "grad_norm": 6.349264800380979,
+      "learning_rate": 1.958725330760044e-05,
+      "loss": 1.0356,
+      "step": 64
+    },
+    {
+      "epoch": 1.0,
+      "grad_norm": 6.349264800380979,
+      "learning_rate": 1.9570016065072047e-05,
+      "loss": 0.3835,
+      "step": 65
+    },
+    {
+      "epoch": 1.0155642023346303,
+      "grad_norm": 8.467306321422342,
+      "learning_rate": 1.9552435014069805e-05,
+      "loss": 1.329,
+      "step": 66
+    },
+    {
+      "epoch": 1.0311284046692606,
+      "grad_norm": 8.408957527442329,
+      "learning_rate": 1.953451085990329e-05,
+      "loss": 1.2351,
+      "step": 67
+    },
+    {
+      "epoch": 1.046692607003891,
+      "grad_norm": 13.406884070709882,
+      "learning_rate": 1.9516244321646533e-05,
+      "loss": 1.6201,
+      "step": 68
+    },
+    {
+      "epoch": 1.0622568093385214,
+      "grad_norm": 6.37594555257866,
+      "learning_rate": 1.9497636132109208e-05,
+      "loss": 1.3477,
+      "step": 69
+    },
+    {
+      "epoch": 1.0778210116731517,
+      "grad_norm": 6.084035911845763,
+      "learning_rate": 1.9478687037807215e-05,
+      "loss": 1.1285,
+      "step": 70
+    },
+    {
+      "epoch": 1.0933852140077822,
+      "grad_norm": 5.938233548564638,
+      "learning_rate": 1.9459397798932732e-05,
+      "loss": 1.2728,
+      "step": 71
+    },
+    {
+      "epoch": 1.1089494163424125,
+      "grad_norm": 4.999727686110884,
+      "learning_rate": 1.9439769189323727e-05,
+      "loss": 1.2983,
+      "step": 72
+    },
+    {
+      "epoch": 1.1245136186770428,
+      "grad_norm": 7.546246025502454,
+      "learning_rate": 1.9419801996432896e-05,
+      "loss": 1.2559,
+      "step": 73
+    },
+    {
+      "epoch": 1.140077821011673,
+      "grad_norm": 5.50857701371075,
+      "learning_rate": 1.9399497021296094e-05,
+      "loss": 1.2612,
+      "step": 74
+    },
+    {
+      "epoch": 1.1556420233463034,
+      "grad_norm": 19.755888394433814,
+      "learning_rate": 1.937885507850018e-05,
+      "loss": 1.3324,
+      "step": 75
+    },
+    {
+      "epoch": 1.171206225680934,
+      "grad_norm": 7.035191415935576,
+      "learning_rate": 1.935787699615036e-05,
+      "loss": 1.3674,
+      "step": 76
+    },
+    {
+      "epoch": 1.1867704280155642,
         
     | 
| 546 | 
         
            +
                  "grad_norm": 6.756030563749288,
         
     | 
| 547 | 
         
            +
                  "learning_rate": 1.933656361583694e-05,
         
     | 
| 548 | 
         
            +
                  "loss": 1.183,
         
     | 
| 549 | 
         
            +
                  "step": 77
         
     | 
| 550 | 
         
            +
                },
         
     | 
| 551 | 
         
            +
                {
         
     | 
| 552 | 
         
            +
                  "epoch": 1.2023346303501945,
         
     | 
| 553 | 
         
            +
                  "grad_norm": 8.841867408302369,
         
     | 
| 554 | 
         
            +
                  "learning_rate": 1.931491579260158e-05,
         
     | 
| 555 | 
         
            +
                  "loss": 1.2485,
         
     | 
| 556 | 
         
            +
                  "step": 78
         
     | 
| 557 | 
         
            +
                },
         
     | 
| 558 | 
         
            +
                {
         
     | 
| 559 | 
         
            +
                  "epoch": 1.217898832684825,
         
     | 
| 560 | 
         
            +
                  "grad_norm": 5.387317154730147,
         
     | 
| 561 | 
         
            +
                  "learning_rate": 1.9292934394902992e-05,
         
     | 
| 562 | 
         
            +
                  "loss": 1.5021,
         
     | 
| 563 | 
         
            +
                  "step": 79
         
     | 
| 564 | 
         
            +
                },
         
     | 
| 565 | 
         
            +
                {
         
     | 
| 566 | 
         
            +
                  "epoch": 1.2334630350194553,
         
     | 
| 567 | 
         
            +
                  "grad_norm": 14.871501309985415,
         
     | 
| 568 | 
         
            +
                  "learning_rate": 1.9270620304582077e-05,
         
     | 
| 569 | 
         
            +
                  "loss": 1.5676,
         
     | 
| 570 | 
         
            +
                  "step": 80
         
     | 
| 571 | 
         
            +
                },
         
     | 
| 572 | 
         
            +
                {
         
     | 
| 573 | 
         
            +
                  "epoch": 1.2490272373540856,
         
     | 
| 574 | 
         
            +
                  "grad_norm": 10.997862717881436,
         
     | 
| 575 | 
         
            +
                  "learning_rate": 1.9247974416826585e-05,
         
     | 
| 576 | 
         
            +
                  "loss": 1.3707,
         
     | 
| 577 | 
         
            +
                  "step": 81
         
     | 
| 578 | 
         
            +
                },
         
     | 
| 579 | 
         
            +
                {
         
     | 
| 580 | 
         
            +
                  "epoch": 1.264591439688716,
         
     | 
| 581 | 
         
            +
                  "grad_norm": 7.2507989168336735,
         
     | 
| 582 | 
         
            +
                  "learning_rate": 1.922499764013518e-05,
         
     | 
| 583 | 
         
            +
                  "loss": 1.5842,
         
     | 
| 584 | 
         
            +
                  "step": 82
         
     | 
| 585 | 
         
            +
                },
         
     | 
| 586 | 
         
            +
                {
         
     | 
| 587 | 
         
            +
                  "epoch": 1.2801556420233462,
         
     | 
| 588 | 
         
            +
                  "grad_norm": 9.537768676771838,
         
     | 
| 589 | 
         
            +
                  "learning_rate": 1.920169089628099e-05,
         
     | 
| 590 | 
         
            +
                  "loss": 1.4067,
         
     | 
| 591 | 
         
            +
                  "step": 83
         
     | 
| 592 | 
         
            +
                },
         
     | 
| 593 | 
         
            +
                {
         
     | 
| 594 | 
         
            +
                  "epoch": 1.2957198443579767,
         
     | 
| 595 | 
         
            +
                  "grad_norm": 8.163551805381035,
         
     | 
| 596 | 
         
            +
                  "learning_rate": 1.9178055120274625e-05,
         
     | 
| 597 | 
         
            +
                  "loss": 1.2465,
         
     | 
| 598 | 
         
            +
                  "step": 84
         
     | 
| 599 | 
         
            +
                },
         
     | 
| 600 | 
         
            +
                {
         
     | 
| 601 | 
         
            +
                  "epoch": 1.311284046692607,
         
     | 
| 602 | 
         
            +
                  "grad_norm": 6.640431551160505,
         
     | 
| 603 | 
         
            +
                  "learning_rate": 1.9154091260326698e-05,
         
     | 
| 604 | 
         
            +
                  "loss": 1.3956,
         
     | 
| 605 | 
         
            +
                  "step": 85
         
     | 
| 606 | 
         
            +
                },
         
     | 
| 607 | 
         
            +
                {
         
     | 
| 608 | 
         
            +
                  "epoch": 1.3268482490272373,
         
     | 
| 609 | 
         
            +
                  "grad_norm": 26.20900323475498,
         
     | 
| 610 | 
         
            +
                  "learning_rate": 1.9129800277809742e-05,
         
     | 
| 611 | 
         
            +
                  "loss": 1.2936,
         
     | 
| 612 | 
         
            +
                  "step": 86
         
     | 
| 613 | 
         
            +
                },
         
     | 
| 614 | 
         
            +
                {
         
     | 
| 615 | 
         
            +
                  "epoch": 1.3424124513618678,
         
     | 
| 616 | 
         
            +
                  "grad_norm": 9.219414651764641,
         
     | 
| 617 | 
         
            +
                  "learning_rate": 1.910518314721967e-05,
         
     | 
| 618 | 
         
            +
                  "loss": 1.5675,
         
     | 
| 619 | 
         
            +
                  "step": 87
         
     | 
| 620 | 
         
            +
                },
         
     | 
| 621 | 
         
            +
                {
         
     | 
| 622 | 
         
            +
                  "epoch": 1.3579766536964981,
         
     | 
| 623 | 
         
            +
                  "grad_norm": 6.169958007250397,
         
     | 
| 624 | 
         
            +
                  "learning_rate": 1.9080240856136675e-05,
         
     | 
| 625 | 
         
            +
                  "loss": 1.51,
         
     | 
| 626 | 
         
            +
                  "step": 88
         
     | 
| 627 | 
         
            +
                },
         
     | 
| 628 | 
         
            +
                {
         
     | 
| 629 | 
         
            +
                  "epoch": 1.3735408560311284,
         
     | 
| 630 | 
         
            +
                  "grad_norm": 7.775254406585112,
         
     | 
| 631 | 
         
            +
                  "learning_rate": 1.9054974405185605e-05,
         
     | 
| 632 | 
         
            +
                  "loss": 1.5837,
         
     | 
| 633 | 
         
            +
                  "step": 89
         
     | 
| 634 | 
         
            +
                },
         
     | 
| 635 | 
         
            +
                {
         
     | 
| 636 | 
         
            +
                  "epoch": 1.3891050583657587,
         
     | 
| 637 | 
         
            +
                  "grad_norm": 6.000524431156958,
         
     | 
| 638 | 
         
            +
                  "learning_rate": 1.902938480799583e-05,
         
     | 
| 639 | 
         
            +
                  "loss": 1.239,
         
     | 
| 640 | 
         
            +
                  "step": 90
         
     | 
| 641 | 
         
            +
                },
         
     | 
| 642 | 
         
            +
                {
         
     | 
| 643 | 
         
            +
                  "epoch": 1.404669260700389,
         
     | 
| 644 | 
         
            +
                  "grad_norm": 6.445928086057122,
         
     | 
| 645 | 
         
            +
                  "learning_rate": 1.9003473091160557e-05,
         
     | 
| 646 | 
         
            +
                  "loss": 1.2915,
         
     | 
| 647 | 
         
            +
                  "step": 91
         
     | 
| 648 | 
         
            +
                },
         
     | 
| 649 | 
         
            +
                {
         
     | 
| 650 | 
         
            +
                  "epoch": 1.4202334630350195,
         
     | 
| 651 | 
         
            +
                  "grad_norm": 8.175071297255242,
         
     | 
| 652 | 
         
            +
                  "learning_rate": 1.8977240294195676e-05,
         
     | 
| 653 | 
         
            +
                  "loss": 1.5306,
         
     | 
| 654 | 
         
            +
                  "step": 92
         
     | 
| 655 | 
         
            +
                },
         
     | 
| 656 | 
         
            +
                {
         
     | 
| 657 | 
         
            +
                  "epoch": 1.4357976653696498,
         
     | 
| 658 | 
         
            +
                  "grad_norm": 5.720210778577455,
         
     | 
| 659 | 
         
            +
                  "learning_rate": 1.895068746949803e-05,
         
     | 
| 660 | 
         
            +
                  "loss": 1.3159,
         
     | 
| 661 | 
         
            +
                  "step": 93
         
     | 
| 662 | 
         
            +
                },
         
     | 
| 663 | 
         
            +
                {
         
     | 
| 664 | 
         
            +
                  "epoch": 1.45136186770428,
         
     | 
| 665 | 
         
            +
                  "grad_norm": 9.833046906184748,
         
     | 
| 666 | 
         
            +
                  "learning_rate": 1.8923815682303214e-05,
         
     | 
| 667 | 
         
            +
                  "loss": 1.553,
         
     | 
| 668 | 
         
            +
                  "step": 94
         
     | 
| 669 | 
         
            +
                },
         
     | 
| 670 | 
         
            +
                {
         
     | 
| 671 | 
         
            +
                  "epoch": 1.4669260700389106,
         
     | 
| 672 | 
         
            +
                  "grad_norm": 6.851775208018755,
         
     | 
| 673 | 
         
            +
                  "learning_rate": 1.8896626010642833e-05,
         
     | 
| 674 | 
         
            +
                  "loss": 1.5885,
         
     | 
| 675 | 
         
            +
                  "step": 95
         
     | 
| 676 | 
         
            +
                },
         
     | 
| 677 | 
         
            +
                {
         
     | 
| 678 | 
         
            +
                  "epoch": 1.482490272373541,
         
     | 
| 679 | 
         
            +
                  "grad_norm": 7.1890477833480295,
         
     | 
| 680 | 
         
            +
                  "learning_rate": 1.886911954530124e-05,
         
     | 
| 681 | 
         
            +
                  "loss": 1.5322,
         
     | 
| 682 | 
         
            +
                  "step": 96
         
     | 
| 683 | 
         
            +
                },
         
     | 
| 684 | 
         
            +
                {
         
     | 
| 685 | 
         
            +
                  "epoch": 1.4980544747081712,
         
     | 
| 686 | 
         
            +
                  "grad_norm": 8.094895085879692,
         
     | 
| 687 | 
         
            +
                  "learning_rate": 1.884129738977181e-05,
         
     | 
| 688 | 
         
            +
                  "loss": 1.326,
         
     | 
| 689 | 
         
            +
                  "step": 97
         
     | 
| 690 | 
         
            +
                },
         
     | 
| 691 | 
         
            +
                {
         
     | 
| 692 | 
         
            +
                  "epoch": 1.5136186770428015,
         
     | 
| 693 | 
         
            +
                  "grad_norm": 9.604855227288974,
         
     | 
| 694 | 
         
            +
                  "learning_rate": 1.8813160660212636e-05,
         
     | 
| 695 | 
         
            +
                  "loss": 1.4432,
         
     | 
| 696 | 
         
            +
                  "step": 98
         
     | 
| 697 | 
         
            +
                },
         
     | 
| 698 | 
         
            +
                {
         
     | 
| 699 | 
         
            +
                  "epoch": 1.5291828793774318,
         
     | 
| 700 | 
         
            +
                  "grad_norm": 10.150820230299194,
         
     | 
| 701 | 
         
            +
                  "learning_rate": 1.8784710485401775e-05,
         
     | 
| 702 | 
         
            +
                  "loss": 1.2174,
         
     | 
| 703 | 
         
            +
                  "step": 99
         
     | 
| 704 | 
         
            +
                },
         
     | 
| 705 | 
         
            +
                {
         
     | 
| 706 | 
         
            +
                  "epoch": 1.544747081712062,
         
     | 
| 707 | 
         
            +
                  "grad_norm": 9.238169258331935,
         
     | 
| 708 | 
         
            +
                  "learning_rate": 1.875594800669195e-05,
         
     | 
| 709 | 
         
            +
                  "loss": 1.2617,
         
     | 
| 710 | 
         
            +
                  "step": 100
         
     | 
| 711 | 
         
            +
                },
         
     | 
| 712 | 
         
            +
                {
         
     | 
| 713 | 
         
            +
                  "epoch": 1.5603112840466926,
         
     | 
| 714 | 
         
            +
                  "grad_norm": 9.441159749907698,
         
     | 
| 715 | 
         
            +
                  "learning_rate": 1.8726874377964764e-05,
         
     | 
| 716 | 
         
            +
                  "loss": 1.234,
         
     | 
| 717 | 
         
            +
                  "step": 101
         
     | 
| 718 | 
         
            +
                },
         
     | 
| 719 | 
         
            +
                {
         
     | 
| 720 | 
         
            +
                  "epoch": 1.575875486381323,
         
     | 
| 721 | 
         
            +
                  "grad_norm": 7.2474587450825,
         
     | 
| 722 | 
         
            +
                  "learning_rate": 1.869749076558442e-05,
         
     | 
| 723 | 
         
            +
                  "loss": 1.1766,
         
     | 
| 724 | 
         
            +
                  "step": 102
         
     | 
| 725 | 
         
            +
                },
         
     | 
| 726 | 
         
            +
                {
         
     | 
| 727 | 
         
            +
                  "epoch": 1.5914396887159534,
         
     | 
| 728 | 
         
            +
                  "grad_norm": 6.23841795541216,
         
     | 
| 729 | 
         
            +
                  "learning_rate": 1.8667798348350918e-05,
         
     | 
| 730 | 
         
            +
                  "loss": 1.2045,
         
     | 
| 731 | 
         
            +
                  "step": 103
         
     | 
| 732 | 
         
            +
                },
         
     | 
| 733 | 
         
            +
                {
         
     | 
| 734 | 
         
            +
                  "epoch": 1.6070038910505837,
         
     | 
| 735 | 
         
            +
                  "grad_norm": 6.554762465264489,
         
     | 
| 736 | 
         
            +
                  "learning_rate": 1.863779831745276e-05,
         
     | 
| 737 | 
         
            +
                  "loss": 1.1382,
         
     | 
| 738 | 
         
            +
                  "step": 104
         
     | 
| 739 | 
         
            +
                },
         
     | 
| 740 | 
         
            +
                {
         
     | 
| 741 | 
         
            +
                  "epoch": 1.622568093385214,
         
     | 
| 742 | 
         
            +
                  "grad_norm": 5.997892924979543,
         
     | 
| 743 | 
         
            +
                  "learning_rate": 1.8607491876419183e-05,
         
     | 
| 744 | 
         
            +
                  "loss": 1.4172,
         
     | 
| 745 | 
         
            +
                  "step": 105
         
     | 
| 746 | 
         
            +
                },
         
     | 
| 747 | 
         
            +
                {
         
     | 
| 748 | 
         
            +
                  "epoch": 1.6381322957198443,
         
     | 
| 749 | 
         
            +
                  "grad_norm": 6.371740787277364,
         
     | 
| 750 | 
         
            +
                  "learning_rate": 1.8576880241071852e-05,
         
     | 
| 751 | 
         
            +
                  "loss": 1.3726,
         
     | 
| 752 | 
         
            +
                  "step": 106
         
     | 
| 753 | 
         
            +
                },
         
     | 
| 754 | 
         
            +
                {
         
     | 
| 755 | 
         
            +
                  "epoch": 1.6536964980544746,
         
     | 
| 756 | 
         
            +
                  "grad_norm": 7.86316264854559,
         
     | 
| 757 | 
         
            +
                  "learning_rate": 1.8545964639476105e-05,
         
     | 
| 758 | 
         
            +
                  "loss": 1.3789,
         
     | 
| 759 | 
         
            +
                  "step": 107
         
     | 
| 760 | 
         
            +
                },
         
     | 
| 761 | 
         
            +
                {
         
     | 
| 762 | 
         
            +
                  "epoch": 1.669260700389105,
         
     | 
| 763 | 
         
            +
                  "grad_norm": 7.417730516932254,
         
     | 
| 764 | 
         
            +
                  "learning_rate": 1.851474631189167e-05,
         
     | 
| 765 | 
         
            +
                  "loss": 1.2634,
         
     | 
| 766 | 
         
            +
                  "step": 108
         
     | 
| 767 | 
         
            +
                },
         
     | 
| 768 | 
         
            +
                {
         
     | 
| 769 | 
         
            +
                  "epoch": 1.6848249027237354,
         
     | 
| 770 | 
         
            +
                  "grad_norm": 8.222005228354707,
         
     | 
| 771 | 
         
            +
                  "learning_rate": 1.848322651072291e-05,
         
     | 
| 772 | 
         
            +
                  "loss": 1.4934,
         
     | 
| 773 | 
         
            +
                  "step": 109
         
     | 
| 774 | 
         
            +
                },
         
     | 
| 775 | 
         
            +
                {
         
     | 
| 776 | 
         
            +
                  "epoch": 1.7003891050583657,
         
     | 
| 777 | 
         
            +
                  "grad_norm": 6.747214231413209,
         
     | 
| 778 | 
         
            +
                  "learning_rate": 1.8451406500468598e-05,
         
     | 
| 779 | 
         
            +
                  "loss": 1.2275,
         
     | 
| 780 | 
         
            +
                  "step": 110
         
     | 
| 781 | 
         
            +
                },
         
     | 
| 782 | 
         
            +
                {
         
     | 
| 783 | 
         
            +
                  "epoch": 1.7159533073929962,
         
     | 
| 784 | 
         
            +
                  "grad_norm": 7.522875546614491,
         
     | 
| 785 | 
         
            +
                  "learning_rate": 1.841928755767116e-05,
         
     | 
| 786 | 
         
            +
                  "loss": 1.4232,
         
     | 
| 787 | 
         
            +
                  "step": 111
         
     | 
| 788 | 
         
            +
                },
         
     | 
| 789 | 
         
            +
                {
         
     | 
| 790 | 
         
            +
                  "epoch": 1.7315175097276265,
         
     | 
| 791 | 
         
            +
                  "grad_norm": 8.395027504519643,
         
     | 
| 792 | 
         
            +
                  "learning_rate": 1.8386870970865488e-05,
         
     | 
| 793 | 
         
            +
                  "loss": 1.4617,
         
     | 
| 794 | 
         
            +
                  "step": 112
         
     | 
| 795 | 
         
            +
                },
         
     | 
| 796 | 
         
            +
                {
         
     | 
| 797 | 
         
            +
                  "epoch": 1.7470817120622568,
         
     | 
| 798 | 
         
            +
                  "grad_norm": 5.682565677158726,
         
     | 
| 799 | 
         
            +
                  "learning_rate": 1.835415804052724e-05,
         
     | 
| 800 | 
         
            +
                  "loss": 1.2782,
         
     | 
| 801 | 
         
            +
                  "step": 113
         
     | 
| 802 | 
         
            +
                },
         
     | 
| 803 | 
         
            +
                {
         
     | 
| 804 | 
         
            +
                  "epoch": 1.7626459143968871,
         
     | 
| 805 | 
         
            +
                  "grad_norm": 17.319126618522,
         
     | 
| 806 | 
         
            +
                  "learning_rate": 1.8321150079020656e-05,
         
     | 
| 807 | 
         
            +
                  "loss": 1.2252,
         
     | 
| 808 | 
         
            +
                  "step": 114
         
     | 
| 809 | 
         
            +
                },
         
     | 
| 810 | 
         
            +
                {
         
     | 
| 811 | 
         
            +
                  "epoch": 1.7782101167315174,
         
     | 
| 812 | 
         
            +
                  "grad_norm": 6.698269746955856,
         
     | 
| 813 | 
         
            +
                  "learning_rate": 1.8287848410545922e-05,
         
     | 
| 814 | 
         
            +
                  "loss": 1.2638,
         
     | 
| 815 | 
         
            +
                  "step": 115
         
     | 
| 816 | 
         
            +
                },
         
     | 
| 817 | 
         
            +
                {
         
     | 
| 818 | 
         
            +
                  "epoch": 1.7937743190661477,
         
     | 
| 819 | 
         
            +
                  "grad_norm": 5.389193784149641,
         
     | 
| 820 | 
         
            +
                  "learning_rate": 1.825425437108605e-05,
         
     | 
| 821 | 
         
            +
                  "loss": 1.2101,
         
     | 
| 822 | 
         
            +
                  "step": 116
         
     | 
| 823 | 
         
            +
                },
         
     | 
| 824 | 
         
            +
                {
         
     | 
| 825 | 
         
            +
                  "epoch": 1.8093385214007782,
         
     | 
| 826 | 
         
            +
                  "grad_norm": 7.556872728377763,
         
     | 
| 827 | 
         
            +
                  "learning_rate": 1.8220369308353255e-05,
         
     | 
| 828 | 
         
            +
                  "loss": 1.4187,
         
     | 
| 829 | 
         
            +
                  "step": 117
         
     | 
| 830 | 
         
            +
                },
         
     | 
| 831 | 
         
            +
                {
         
     | 
| 832 | 
         
            +
                  "epoch": 1.8249027237354085,
         
     | 
| 833 | 
         
            +
                  "grad_norm": 11.500517221563017,
         
     | 
| 834 | 
         
            +
                  "learning_rate": 1.8186194581734922e-05,
         
     | 
| 835 | 
         
            +
                  "loss": 1.2614,
         
     | 
| 836 | 
         
            +
                  "step": 118
         
     | 
| 837 | 
         
            +
                },
         
     | 
| 838 | 
         
            +
                {
         
     | 
| 839 | 
         
            +
                  "epoch": 1.840466926070039,
         
     | 
| 840 | 
         
            +
                  "grad_norm": 6.973436659657028,
         
     | 
| 841 | 
         
            +
                  "learning_rate": 1.815173156223906e-05,
         
     | 
| 842 | 
         
            +
                  "loss": 1.6287,
         
     | 
| 843 | 
         
            +
                  "step": 119
         
     | 
| 844 | 
         
            +
                },
         
     | 
| 845 | 
         
            +
                {
         
     | 
| 846 | 
         
            +
                  "epoch": 1.8560311284046693,
         
     | 
| 847 | 
         
            +
                  "grad_norm": 8.82430378736939,
         
     | 
| 848 | 
         
            +
                  "learning_rate": 1.811698163243929e-05,
         
     | 
| 849 | 
         
            +
                  "loss": 1.7683,
         
     | 
| 850 | 
         
            +
                  "step": 120
         
     | 
| 851 | 
         
            +
                },
         
     | 
| 852 | 
         
            +
                {
         
     | 
| 853 | 
         
            +
                  "epoch": 1.8715953307392996,
         
     | 
| 854 | 
         
            +
                  "grad_norm": 7.44523008567814,
         
     | 
| 855 | 
         
            +
                  "learning_rate": 1.8081946186419375e-05,
         
     | 
| 856 | 
         
            +
                  "loss": 1.3718,
         
     | 
| 857 | 
         
            +
                  "step": 121
         
     | 
| 858 | 
         
            +
                },
         
     | 
| 859 | 
         
            +
                {
         
     | 
| 860 | 
         
            +
                  "epoch": 1.88715953307393,
         
     | 
| 861 | 
         
            +
                  "grad_norm": 11.092035922405335,
         
     | 
| 862 | 
         
            +
                  "learning_rate": 1.804662662971732e-05,
         
     | 
| 863 | 
         
            +
                  "loss": 1.2244,
         
     | 
| 864 | 
         
            +
                  "step": 122
         
     | 
| 865 | 
         
            +
                },
         
     | 
| 866 | 
         
            +
                {
         
     | 
| 867 | 
         
            +
                  "epoch": 1.9027237354085602,
         
     | 
| 868 | 
         
            +
                  "grad_norm": 8.907024665470365,
         
     | 
| 869 | 
         
            +
                  "learning_rate": 1.801102437926896e-05,
         
     | 
| 870 | 
         
            +
                  "loss": 1.1516,
         
     | 
| 871 | 
         
            +
                  "step": 123
         
     | 
| 872 | 
         
            +
                },
         
     | 
| 873 | 
         
            +
                {
         
     | 
| 874 | 
         
            +
                  "epoch": 1.9182879377431905,
         
     | 
| 875 | 
         
            +
                  "grad_norm": 7.330064038520311,
         
     | 
| 876 | 
         
            +
                  "learning_rate": 1.797514086335113e-05,
         
     | 
| 877 | 
         
            +
                  "loss": 1.288,
         
     | 
| 878 | 
         
            +
                  "step": 124
         
     | 
| 879 | 
         
            +
                },
         
     | 
| 880 | 
         
            +
                {
         
     | 
| 881 | 
         
            +
                  "epoch": 1.933852140077821,
         
     | 
| 882 | 
         
            +
                  "grad_norm": 10.980569189715007,
         
     | 
| 883 | 
         
            +
                  "learning_rate": 1.7938977521524355e-05,
         
     | 
| 884 | 
         
            +
                  "loss": 1.3815,
         
     | 
| 885 | 
         
            +
                  "step": 125
         
     | 
| 886 | 
         
            +
                },
         
     | 
| 887 | 
         
            +
                {
         
     | 
| 888 | 
         
            +
                  "epoch": 1.9494163424124513,
         
     | 
| 889 | 
         
            +
                  "grad_norm": 8.792218570481833,
         
     | 
| 890 | 
         
            +
                  "learning_rate": 1.79025358045751e-05,
         
     | 
| 891 | 
         
            +
                  "loss": 1.5337,
         
     | 
| 892 | 
         
            +
                  "step": 126
         
     | 
| 893 | 
         
            +
                },
         
     | 
| 894 | 
         
            +
                {
         
     | 
| 895 | 
         
            +
                  "epoch": 1.9649805447470818,
         
     | 
| 896 | 
         
            +
                  "grad_norm": 8.759608210372287,
         
     | 
| 897 | 
         
            +
                  "learning_rate": 1.786581717445759e-05,
         
     | 
| 898 | 
         
            +
                  "loss": 1.4971,
         
     | 
| 899 | 
         
            +
                  "step": 127
         
     | 
| 900 | 
         
            +
                },
         
     | 
| 901 | 
         
            +
                {
         
     | 
| 902 | 
         
            +
                  "epoch": 1.9805447470817121,
         
     | 
| 903 | 
         
            +
                  "grad_norm": 6.719480961643205,
         
     | 
| 904 | 
         
            +
                  "learning_rate": 1.782882310423512e-05,
         
     | 
| 905 | 
         
            +
                  "loss": 1.3845,
         
     | 
| 906 | 
         
            +
                  "step": 128
         
     | 
| 907 | 
         
            +
                },
         
     | 
| 908 | 
         
            +
                {
         
     | 
| 909 | 
         
            +
                  "epoch": 1.9961089494163424,
         
     | 
| 910 | 
         
            +
                  "grad_norm": 11.98512915914497,
         
     | 
| 911 | 
         
            +
                  "learning_rate": 1.7791555078020992e-05,
         
     | 
| 912 | 
         
            +
                  "loss": 1.8083,
         
     | 
| 913 | 
         
            +
                  "step": 129
         
     | 
| 914 | 
         
            +
                },
         
     | 
| 915 | 
         
            +
                {
         
     | 
| 916 | 
         
            +
                  "epoch": 2.0,
         
     | 
| 917 | 
         
            +
                  "grad_norm": 11.98512915914497,
         
     | 
| 918 | 
         
            +
                  "learning_rate": 1.7754014590918964e-05,
         
     | 
| 919 | 
         
            +
                  "loss": 0.3816,
         
     | 
| 920 | 
         
            +
                  "step": 130
         
     | 
| 921 | 
         
            +
                },
         
     | 
| 922 | 
         
            +
                {
         
     | 
| 923 | 
         
            +
                  "epoch": 2.0155642023346303,
         
     | 
| 924 | 
         
            +
                  "grad_norm": 6.076810203247505,
         
     | 
| 925 | 
         
            +
                  "learning_rate": 1.771620314896327e-05,
         
     | 
| 926 | 
         
            +
                  "loss": 1.1992,
         
     | 
| 927 | 
         
            +
                  "step": 131
         
     | 
| 928 | 
         
            +
                },
         
     | 
| 929 | 
         
            +
                {
         
     | 
| 930 | 
         
            +
                  "epoch": 2.0311284046692606,
         
     | 
| 931 | 
         
            +
                  "grad_norm": 5.799331945733901,
         
     | 
| 932 | 
         
            +
                  "learning_rate": 1.76781222690582e-05,
         
     | 
| 933 | 
         
            +
                  "loss": 0.9603,
         
     | 
| 934 | 
         
            +
                  "step": 132
         
     | 
| 935 | 
         
            +
                },
         
     | 
| 936 | 
         
            +
                {
         
     | 
| 937 | 
         
            +
                  "epoch": 2.046692607003891,
         
     | 
| 938 | 
         
            +
                  "grad_norm": 7.639810257162938,
         
     | 
| 939 | 
         
            +
                  "learning_rate": 1.763977347891725e-05,
         
     | 
| 940 | 
         
            +
                  "loss": 1.1609,
         
     | 
| 941 | 
         
            +
                  "step": 133
         
     | 
| 942 | 
         
            +
                },
         
     | 
| 943 | 
         
            +
                {
         
     | 
| 944 | 
         
            +
                  "epoch": 2.062256809338521,
         
     | 
| 945 | 
         
            +
                  "grad_norm": 4.641223961792758,
         
     | 
| 946 | 
         
            +
                  "learning_rate": 1.7601158317001835e-05,
         
     | 
| 947 | 
         
            +
                  "loss": 1.4175,
         
     | 
| 948 | 
         
            +
                  "step": 134
         
     | 
| 949 | 
         
            +
                },
         
     | 
| 950 | 
         
            +
                {
         
     | 
| 951 | 
         
            +
                  "epoch": 2.077821011673152,
         
     | 
| 952 | 
         
            +
                  "grad_norm": 6.543906827631292,
         
     | 
| 953 | 
         
            +
                  "learning_rate": 1.756227833245956e-05,
         
     | 
| 954 | 
         
            +
                  "loss": 1.1293,
         
     | 
| 955 | 
         
            +
                  "step": 135
         
     | 
| 956 | 
         
            +
                },
         
     | 
| 957 | 
         
            +
                {
         
     | 
| 958 | 
         
            +
                  "epoch": 2.093385214007782,
         
     | 
| 959 | 
         
            +
                  "grad_norm": 5.807214604460152,
         
     | 
| 960 | 
         
            +
                  "learning_rate": 1.752313508506208e-05,
         
     | 
| 961 | 
         
            +
                  "loss": 1.2959,
         
     | 
| 962 | 
         
            +
                  "step": 136
         
     | 
| 963 | 
         
            +
                },
         
     | 
| 964 | 
         
            +
                {
         
     | 
| 965 | 
         
            +
                  "epoch": 2.1089494163424125,
         
     | 
| 966 | 
         
            +
                  "grad_norm": 5.553563692538825,
         
     | 
| 967 | 
         
            +
                  "learning_rate": 1.748373014514253e-05,
         
     | 
| 968 | 
         
            +
                  "loss": 1.5164,
         
     | 
| 969 | 
         
            +
                  "step": 137
         
     | 
| 970 | 
         
            +
                },
         
     | 
| 971 | 
         
            +
                {
         
     | 
| 972 | 
         
            +
                  "epoch": 2.124513618677043,
         
     | 
| 973 | 
         
            +
                  "grad_norm": 5.570141549794821,
         
     | 
| 974 | 
         
            +
                  "learning_rate": 1.7444065093532507e-05,
         
     | 
| 975 | 
         
            +
                  "loss": 1.223,
         
     | 
| 976 | 
         
            +
                  "step": 138
         
     | 
| 977 | 
         
            +
                },
         
     | 
| 978 | 
         
            +
                {
         
     | 
| 979 | 
         
            +
                  "epoch": 2.140077821011673,
         
     | 
| 980 | 
         
            +
                  "grad_norm": 10.218543215792467,
         
     | 
| 981 | 
         
            +
                  "learning_rate": 1.740414152149868e-05,
         
     | 
| 982 | 
         
            +
                  "loss": 1.2693,
         
     | 
| 983 | 
         
            +
                  "step": 139
         
     | 
| 984 | 
         
            +
                },
         
     | 
| 985 | 
         
            +
                {
         
     | 
| 986 | 
         
            +
                  "epoch": 2.1556420233463034,
         
     | 
| 987 | 
         
            +
                  "grad_norm": 6.654829769858401,
         
     | 
| 988 | 
         
            +
                  "learning_rate": 1.736396103067893e-05,
         
     | 
| 989 | 
         
            +
                  "loss": 1.2822,
         
     | 
| 990 | 
         
            +
                  "step": 140
         
     | 
| 991 | 
         
            +
                },
         
     | 
| 992 | 
         
            +
                {
         
     | 
| 993 | 
         
            +
                  "epoch": 2.1712062256809337,
         
     | 
| 994 | 
         
            +
                  "grad_norm": 7.063879942686053,
         
     | 
| 995 | 
         
            +
                  "learning_rate": 1.73235252330181e-05,
         
     | 
| 996 | 
         
            +
                  "loss": 1.2313,
         
     | 
| 997 | 
         
            +
                  "step": 141
         
     | 
| 998 | 
         
            +
                },
         
     | 
| 999 | 
         
            +
                {
         
     | 
| 1000 | 
         
            +
                  "epoch": 2.1867704280155644,
         
     | 
| 1001 | 
         
            +
                  "grad_norm": 8.26459321928057,
         
     | 
| 1002 | 
         
            +
                  "learning_rate": 1.728283575070333e-05,
         
     | 
| 1003 | 
         
            +
                  "loss": 1.3606,
         
     | 
| 1004 | 
         
            +
                  "step": 142
         
     | 
| 1005 | 
         
            +
                },
         
     | 
| 1006 | 
         
            +
                {
         
     | 
| 1007 | 
         
            +
                  "epoch": 2.2023346303501947,
         
     | 
| 1008 | 
         
            +
                  "grad_norm": 8.158351915542648,
         
     | 
| 1009 | 
         
            +
                  "learning_rate": 1.7241894216098995e-05,
         
     | 
| 1010 | 
         
            +
                  "loss": 1.1179,
         
     | 
| 1011 | 
         
            +
                  "step": 143
         
     | 
| 1012 | 
         
            +
                },
         
     | 
| 1013 | 
         
            +
                {
         
     | 
| 1014 | 
         
            +
                  "epoch": 2.217898832684825,
         
     | 
| 1015 | 
         
            +
                  "grad_norm": 6.025748582779371,
         
     | 
| 1016 | 
         
            +
                  "learning_rate": 1.720070227168118e-05,
         
     | 
| 1017 | 
         
            +
                  "loss": 1.2041,
         
     | 
| 1018 | 
         
            +
                  "step": 144
         
     | 
| 1019 | 
         
            +
                },
         
     | 
| 1020 | 
         
            +
                {
         
     | 
| 1021 | 
         
            +
                  "epoch": 2.2334630350194553,
         
     | 
| 1022 | 
         
            +
                  "grad_norm": 7.30387722610904,
         
     | 
| 1023 | 
         
            +
                  "learning_rate": 1.7159261569971828e-05,
         
     | 
| 1024 | 
         
            +
                  "loss": 1.2661,
         
     | 
| 1025 | 
         
            +
                  "step": 145
         
     | 
| 1026 | 
         
            +
                },
         
     | 
| 1027 | 
         
            +
                {
         
     | 
| 1028 | 
         
            +
                  "epoch": 2.2490272373540856,
         
     | 
| 1029 | 
         
            +
                  "grad_norm": 6.274862129099997,
         
     | 
| 1030 | 
         
            +
                  "learning_rate": 1.7117573773472418e-05,
         
     | 
| 1031 | 
         
            +
                  "loss": 1.3209,
         
     | 
| 1032 | 
         
            +
                  "step": 146
         
     | 
| 1033 | 
         
            +
                },
         
     | 
| 1034 | 
         
            +
                {
         
     | 
| 1035 | 
         
            +
                  "epoch": 2.264591439688716,
         
     | 
| 1036 | 
         
            +
                  "grad_norm": 8.052280930110639,
         
     | 
| 1037 | 
         
            +
                  "learning_rate": 1.7075640554597278e-05,
         
     | 
| 1038 | 
         
            +
                  "loss": 1.1796,
         
     | 
| 1039 | 
         
            +
                  "step": 147
         
     | 
| 1040 | 
         
            +
                },
         
     | 
| 1041 | 
         
            +
                {
         
     | 
| 1042 | 
         
            +
                  "epoch": 2.280155642023346,
         
     | 
| 1043 | 
         
            +
                  "grad_norm": 10.893409682259657,
         
     | 
| 1044 | 
         
            +
                  "learning_rate": 1.703346359560651e-05,
         
     | 
| 1045 | 
         
            +
                  "loss": 1.4143,
         
     | 
| 1046 | 
         
            +
                  "step": 148
         
     | 
| 1047 | 
         
            +
                },
         
     | 
| 1048 | 
         
            +
                {
         
     | 
| 1049 | 
         
            +
                  "epoch": 2.2957198443579765,
         
     | 
| 1050 | 
         
            +
                  "grad_norm": 9.090803934496224,
         
     | 
| 1051 | 
         
            +
                  "learning_rate": 1.6991044588538455e-05,
         
     | 
| 1052 | 
         
            +
                  "loss": 1.1129,
         
     | 
| 1053 | 
         
            +
                  "step": 149
         
     | 
| 1054 | 
         
            +
                },
         
     | 
| 1055 | 
         
            +
                {
         
     | 
| 1056 | 
         
            +
                  "epoch": 2.311284046692607,
         
     | 
| 1057 | 
         
            +
                  "grad_norm": 9.407206024176414,
         
     | 
| 1058 | 
         
            +
                  "learning_rate": 1.694838523514187e-05,
         
     | 
| 1059 | 
         
            +
                  "loss": 1.1586,
         
     | 
| 1060 | 
         
            +
                  "step": 150
         
     | 
| 1061 | 
         
            +
                },
         
     | 
| 1062 | 
         
            +
                {
         
     | 
| 1063 | 
         
            +
                  "epoch": 2.3268482490272375,
         
     | 
| 1064 | 
         
            +
                  "grad_norm": 8.517506234140628,
         
     | 
| 1065 | 
         
            +
                  "learning_rate": 1.690548724680761e-05,
         
     | 
| 1066 | 
         
            +
                  "loss": 1.4231,
         
     | 
| 1067 | 
         
            +
                  "step": 151
         
     | 
| 1068 | 
         
            +
                },
         
     | 
| 1069 | 
         
            +
                {
         
     | 
| 1070 | 
         
            +
                  "epoch": 2.342412451361868,
         
     | 
| 1071 | 
         
            +
                  "grad_norm": 6.1621127583650255,
         
     | 
| 1072 | 
         
            +
                  "learning_rate": 1.6862352344500004e-05,
         
     | 
| 1073 | 
         
            +
                  "loss": 1.3577,
         
     | 
| 1074 | 
         
            +
                  "step": 152
         
     | 
| 1075 | 
         
            +
                },
         
     | 
| 1076 | 
         
            +
                {
         
     | 
| 1077 | 
         
            +
                  "epoch": 2.357976653696498,
         
     | 
| 1078 | 
         
            +
                  "grad_norm": 37.87140785888283,
         
     | 
| 1079 | 
         
            +
                  "learning_rate": 1.681898225868779e-05,
         
     | 
| 1080 | 
         
            +
                  "loss": 1.3384,
         
     | 
| 1081 | 
         
            +
                  "step": 153
         
     | 
| 1082 | 
         
            +
                },
         
     | 
| 1083 | 
         
            +
                {
         
     | 
| 1084 | 
         
            +
                  "epoch": 2.3735408560311284,
         
     | 
| 1085 | 
         
            +
                  "grad_norm": 8.232248393098434,
         
     | 
| 1086 | 
         
            +
                  "learning_rate": 1.677537872927471e-05,
         
     | 
| 1087 | 
         
            +
                  "loss": 1.3798,
         
     | 
| 1088 | 
         
            +
                  "step": 154
         
     | 
| 1089 | 
         
            +
                },
         
     | 
| 1090 | 
         
            +
                {
         
     | 
| 1091 | 
         
            +
                  "epoch": 2.3891050583657587,
         
     | 
| 1092 | 
         
            +
                  "grad_norm": 8.407799337670626,
         
     | 
| 1093 | 
         
            +
                  "learning_rate": 1.673154350552971e-05,
         
     | 
| 1094 | 
         
            +
                  "loss": 1.3535,
         
     | 
| 1095 | 
         
            +
                  "step": 155
         
     | 
| 1096 | 
         
            +
                },
         
     | 
| 1097 | 
         
            +
                {
         
     | 
| 1098 | 
         
            +
                  "epoch": 2.404669260700389,
         
     | 
| 1099 | 
         
            +
                  "grad_norm": 11.052092401395148,
         
     | 
| 1100 | 
         
            +
                  "learning_rate": 1.6687478346016736e-05,
         
     | 
| 1101 | 
         
            +
                  "loss": 1.3003,
         
     | 
| 1102 | 
         
            +
                  "step": 156
         
     | 
| 1103 | 
         
            +
                },
         
     | 
| 1104 | 
         
            +
                {
         
     | 
| 1105 | 
         
            +
                  "epoch": 2.4202334630350193,
         
     | 
| 1106 | 
         
            +
                  "grad_norm": 11.914207501283393,
         
     | 
| 1107 | 
         
            +
                  "learning_rate": 1.6643185018524227e-05,
         
     | 
| 1108 | 
         
            +
                  "loss": 1.2803,
         
     | 
| 1109 | 
         
            +
                  "step": 157
         
     | 
| 1110 | 
         
            +
                },
         
     | 
| 1111 | 
         
            +
                {
         
     | 
| 1112 | 
         
            +
                  "epoch": 2.43579766536965,
         
     | 
| 1113 | 
         
            +
                  "grad_norm": 9.369362265914104,
         
     | 
| 1114 | 
         
            +
                  "learning_rate": 1.6598665299994162e-05,
         
     | 
| 1115 | 
         
            +
                  "loss": 1.3889,
         
     | 
| 1116 | 
         
            +
                  "step": 158
         
     | 
| 1117 | 
         
            +
                },
         
     | 
| 1118 | 
         
            +
                {
         
     | 
| 1119 | 
         
            +
                  "epoch": 2.4513618677042803,
         
     | 
| 1120 | 
         
            +
                  "grad_norm": 6.155749688526481,
         
     | 
| 1121 | 
         
            +
                  "learning_rate": 1.655392097645079e-05,
         
     | 
| 1122 | 
         
            +
                  "loss": 1.3065,
         
     | 
| 1123 | 
         
            +
                  "step": 159
         
     | 
| 1124 | 
         
            +
                },
         
     | 
| 1125 | 
         
            +
                {
         
     | 
| 1126 | 
         
            +
                  "epoch": 2.4669260700389106,
         
     | 
| 1127 | 
         
            +
                  "grad_norm": 7.610662694751711,
         
     | 
| 1128 | 
         
            +
                  "learning_rate": 1.6508953842928966e-05,
         
     | 
| 1129 | 
         
            +
                  "loss": 1.3677,
         
     | 
| 1130 | 
         
            +
                  "step": 160
         
     | 
| 1131 | 
         
            +
                },
         
     | 
| 1132 | 
         
            +
                {
         
     | 
| 1133 | 
         
            +
                  "epoch": 2.482490272373541,
         
     | 
| 1134 | 
         
            +
                  "grad_norm": 8.874434851480999,
         
     | 
| 1135 | 
         
            +
                  "learning_rate": 1.6463765703402154e-05,
         
     | 
| 1136 | 
         
            +
                  "loss": 1.1467,
         
     | 
| 1137 | 
         
            +
                  "step": 161
         
     | 
| 1138 | 
         
            +
                },
         
     | 
| 1139 | 
         
            +
                {
         
     | 
| 1140 | 
         
            +
                  "epoch": 2.498054474708171,
         
     | 
| 1141 | 
         
            +
                  "grad_norm": 6.048721329156399,
         
     | 
| 1142 | 
         
            +
                  "learning_rate": 1.6418358370710048e-05,
         
     | 
| 1143 | 
         
            +
                  "loss": 1.2878,
         
     | 
| 1144 | 
         
            +
                  "step": 162
         
     | 
| 1145 | 
         
            +
                },
         
     | 
| 1146 | 
         
            +
                {
         
     | 
| 1147 | 
         
            +
                  "epoch": 2.5136186770428015,
         
     | 
| 1148 | 
         
            +
                  "grad_norm": 8.012765512165412,
         
     | 
| 1149 | 
         
            +
                  "learning_rate": 1.6372733666485842e-05,
         
     | 
| 1150 | 
         
            +
                  "loss": 1.2369,
         
     | 
| 1151 | 
         
            +
                  "step": 163
         
     | 
| 1152 | 
         
            +
                },
         
     | 
| 1153 | 
         
            +
                {
         
     | 
| 1154 | 
         
            +
                  "epoch": 2.529182879377432,
         
     | 
| 1155 | 
         
            +
                  "grad_norm": 9.98813005959193,
         
     | 
| 1156 | 
         
            +
                  "learning_rate": 1.6326893421083157e-05,
         
     | 
| 1157 | 
         
            +
                  "loss": 1.3142,
         
     | 
| 1158 | 
         
            +
                  "step": 164
         
     | 
| 1159 | 
         
            +
                },
         
     | 
| 1160 | 
         
            +
                {
         
     | 
| 1161 | 
         
            +
                  "epoch": 2.544747081712062,
         
     | 
| 1162 | 
         
            +
                  "grad_norm": 6.989195393880204,
         
     | 
| 1163 | 
         
            +
                  "learning_rate": 1.6280839473502607e-05,
         
     | 
| 1164 | 
         
            +
                  "loss": 1.2651,
         
     | 
| 1165 | 
         
            +
                  "step": 165
         
     | 
| 1166 | 
         
            +
                },
         
     | 
| 1167 | 
         
            +
                {
         
     | 
| 1168 | 
         
            +
                  "epoch": 2.5603112840466924,
         
     | 
| 1169 | 
         
            +
                  "grad_norm": 13.328346097167426,
         
     | 
| 1170 | 
         
            +
                  "learning_rate": 1.6234573671318027e-05,
         
     | 
| 1171 | 
         
            +
                  "loss": 1.5734,
         
     | 
| 1172 | 
         
            +
                  "step": 166
         
     | 
| 1173 | 
         
            +
                },
         
     | 
| 1174 | 
         
            +
                {
         
     | 
| 1175 | 
         
            +
                  "epoch": 2.5758754863813227,
         
     | 
| 1176 | 
         
            +
                  "grad_norm": 7.47939944848892,
         
     | 
| 1177 | 
         
            +
                  "learning_rate": 1.6188097870602344e-05,
         
     | 
| 1178 | 
         
            +
                  "loss": 1.1876,
         
     | 
| 1179 | 
         
            +
                  "step": 167
         
     | 
| 1180 | 
         
            +
                },
         
     | 
| 1181 | 
         
            +
                {
         
     | 
| 1182 | 
         
            +
                  "epoch": 2.5914396887159534,
         
     | 
| 1183 | 
         
            +
                  "grad_norm": 9.813447558487908,
         
     | 
| 1184 | 
         
            +
                  "learning_rate": 1.614141393585313e-05,
         
     | 
| 1185 | 
         
            +
                  "loss": 1.2428,
         
     | 
| 1186 | 
         
            +
                  "step": 168
         
     | 
| 1187 | 
         
            +
                },
         
     | 
| 1188 | 
         
            +
                {
         
     | 
| 1189 | 
         
            +
                  "epoch": 2.6070038910505837,
         
     | 
| 1190 | 
         
            +
                  "grad_norm": 8.170087254779242,
         
     | 
| 1191 | 
         
            +
                  "learning_rate": 1.6094523739917797e-05,
         
     | 
| 1192 | 
         
            +
                  "loss": 1.2607,
         
     | 
| 1193 | 
         
            +
                  "step": 169
         
     | 
| 1194 | 
         
            +
                },
         
     | 
| 1195 | 
         
            +
                {
         
     | 
| 1196 | 
         
            +
                  "epoch": 2.622568093385214,
         
     | 
| 1197 | 
         
            +
                  "grad_norm": 6.143715398790098,
         
     | 
| 1198 | 
         
            +
                  "learning_rate": 1.6047429163918444e-05,
         
     | 
| 1199 | 
         
            +
                  "loss": 1.3353,
         
     | 
| 1200 | 
         
            +
                  "step": 170
         
     | 
| 1201 | 
         
            +
                },
         
     | 
| 1202 | 
         
            +
                {
         
     | 
| 1203 | 
         
            +
                  "epoch": 2.6381322957198443,
         
     | 
| 1204 | 
         
            +
                  "grad_norm": 17.125656609647287,
         
     | 
| 1205 | 
         
            +
                  "learning_rate": 1.600013209717642e-05,
         
     | 
| 1206 | 
         
            +
                  "loss": 1.4865,
         
     | 
| 1207 | 
         
            +
                  "step": 171
         
     | 
| 1208 | 
         
            +
                },
         
     | 
| 1209 | 
         
            +
                {
         
     | 
| 1210 | 
         
            +
                  "epoch": 2.6536964980544746,
         
     | 
| 1211 | 
         
            +
                  "grad_norm": 10.776858476646886,
         
     | 
| 1212 | 
         
            +
                  "learning_rate": 1.5952634437136523e-05,
         
     | 
| 1213 | 
         
            +
                  "loss": 1.5398,
         
     | 
| 1214 | 
         
            +
                  "step": 172
         
     | 
| 1215 | 
         
            +
                },
         
     | 
| 1216 | 
         
            +
                {
         
     | 
| 1217 | 
         
            +
                  "epoch": 2.669260700389105,
         
     | 
| 1218 | 
         
            +
                  "grad_norm": 7.0818183172701605,
         
     | 
| 1219 | 
         
            +
                  "learning_rate": 1.5904938089290864e-05,
         
     | 
| 1220 | 
         
            +
                  "loss": 1.3059,
         
     | 
| 1221 | 
         
            +
                  "step": 173
         
     | 
| 1222 | 
         
            +
                },
         
     | 
| 1223 | 
         
            +
                {
         
     | 
| 1224 | 
         
            +
                  "epoch": 2.6848249027237356,
         
     | 
| 1225 | 
         
            +
                  "grad_norm": 20.107104125600667,
         
     | 
| 1226 | 
         
            +
                  "learning_rate": 1.5857044967102423e-05,
         
     | 
| 1227 | 
         
            +
                  "loss": 1.325,
         
     | 
| 1228 | 
         
            +
                  "step": 174
         
     | 
| 1229 | 
         
            +
                },
         
     | 
| 1230 | 
         
            +
                {
         
     | 
| 1231 | 
         
            +
                  "epoch": 2.700389105058366,
         
     | 
| 1232 | 
         
            +
                  "grad_norm": 9.685536590494742,
         
     | 
| 1233 | 
         
            +
                  "learning_rate": 1.580895699192831e-05,
         
     | 
| 1234 | 
         
            +
                  "loss": 1.2209,
         
     | 
| 1235 | 
         
            +
                  "step": 175
         
     | 
| 1236 | 
         
            +
                },
         
     | 
| 1237 | 
         
            +
                {
         
     | 
| 1238 | 
         
            +
                  "epoch": 2.7159533073929962,
         
     | 
| 1239 | 
         
            +
                  "grad_norm": 14.272302800628733,
         
     | 
| 1240 | 
         
            +
                  "learning_rate": 1.5760676092942663e-05,
         
     | 
| 1241 | 
         
            +
                  "loss": 1.1283,
         
     | 
| 1242 | 
         
            +
                  "step": 176
         
     | 
| 1243 | 
         
            +
                },
         
     | 
| 1244 | 
         
            +
                {
         
     | 
| 1245 | 
         
            +
                  "epoch": 2.7315175097276265,
         
     | 
| 1246 | 
         
            +
                  "grad_norm": 8.529426981120846,
         
     | 
| 1247 | 
         
            +
                  "learning_rate": 1.571220420705926e-05,
         
     | 
| 1248 | 
         
            +
                  "loss": 1.1967,
         
     | 
| 1249 | 
         
            +
                  "step": 177
         
     | 
| 1250 | 
         
            +
                },
         
     | 
| 1251 | 
         
            +
                {
         
     | 
| 1252 | 
         
            +
                  "epoch": 2.747081712062257,
         
     | 
| 1253 | 
         
            +
                  "grad_norm": 9.25820017440293,
         
     | 
| 1254 | 
         
            +
                  "learning_rate": 1.5663543278853818e-05,
         
     | 
| 1255 | 
         
            +
                  "loss": 1.2561,
         
     | 
| 1256 | 
         
            +
                  "step": 178
         
     | 
| 1257 | 
         
            +
                },
         
     | 
| 1258 | 
         
            +
                {
         
     | 
| 1259 | 
         
            +
                  "epoch": 2.762645914396887,
         
     | 
| 1260 | 
         
            +
                  "grad_norm": 6.7665962336727725,
         
     | 
| 1261 | 
         
            +
                  "learning_rate": 1.5614695260485973e-05,
         
     | 
| 1262 | 
         
            +
                  "loss": 1.1381,
         
     | 
| 1263 | 
         
            +
                  "step": 179
         
     | 
| 1264 | 
         
            +
                },
         
     | 
| 1265 | 
         
            +
                {
         
     | 
| 1266 | 
         
            +
                  "epoch": 2.7782101167315174,
         
     | 
| 1267 | 
         
            +
                  "grad_norm": 11.677157670781277,
         
     | 
| 1268 | 
         
            +
                  "learning_rate": 1.5565662111620967e-05,
         
     | 
| 1269 | 
         
            +
                  "loss": 1.123,
         
     | 
| 1270 | 
         
            +
                  "step": 180
         
     | 
| 1271 | 
         
            +
                },
         
     | 
| 1272 | 
         
            +
                {
         
     | 
| 1273 | 
         
            +
                  "epoch": 2.7937743190661477,
         
     | 
| 1274 | 
         
            +
                  "grad_norm": 10.612616450023706,
         
     | 
| 1275 | 
         
            +
                  "learning_rate": 1.5516445799351046e-05,
         
     | 
| 1276 | 
         
            +
                  "loss": 1.4241,
         
     | 
| 1277 | 
         
            +
                  "step": 181
         
     | 
| 1278 | 
         
            +
                },
         
     | 
| 1279 | 
         
            +
                {
         
     | 
| 1280 | 
         
            +
                  "epoch": 2.809338521400778,
         
     | 
| 1281 | 
         
            +
                  "grad_norm": 5.785097669619218,
         
     | 
| 1282 | 
         
            +
                  "learning_rate": 1.5467048298116516e-05,
         
     | 
| 1283 | 
         
            +
                  "loss": 1.1191,
         
     | 
| 1284 | 
         
            +
                  "step": 182
         
     | 
| 1285 | 
         
            +
                },
         
     | 
| 1286 | 
         
            +
                {
         
     | 
| 1287 | 
         
            +
                  "epoch": 2.8249027237354083,
         
     | 
| 1288 | 
         
            +
                  "grad_norm": 19.456658401658856,
         
     | 
| 1289 | 
         
            +
                  "learning_rate": 1.5417471589626563e-05,
         
     | 
| 1290 | 
         
            +
                  "loss": 1.0679,
         
     | 
| 1291 | 
         
            +
                  "step": 183
         
     | 
| 1292 | 
         
            +
                },
         
     | 
| 1293 | 
         
            +
                {
         
     | 
| 1294 | 
         
            +
                  "epoch": 2.840466926070039,
         
     | 
| 1295 | 
         
            +
                  "grad_norm": 8.020315674017025,
         
     | 
| 1296 | 
         
            +
                  "learning_rate": 1.5367717662779732e-05,
         
     | 
| 1297 | 
         
            +
                  "loss": 1.2059,
         
     | 
| 1298 | 
         
            +
                  "step": 184
         
     | 
| 1299 | 
         
            +
                },
         
     | 
| 1300 | 
         
            +
                {
         
     | 
| 1301 | 
         
            +
                  "epoch": 2.8560311284046693,
         
     | 
| 1302 | 
         
            +
                  "grad_norm": 9.637600750103296,
         
     | 
| 1303 | 
         
            +
                  "learning_rate": 1.531778851358414e-05,
         
     | 
| 1304 | 
         
            +
                  "loss": 1.1613,
         
     | 
| 1305 | 
         
            +
                  "step": 185
         
     | 
| 1306 | 
         
            +
                },
         
     | 
| 1307 | 
         
            +
                {
         
     | 
| 1308 | 
         
            +
                  "epoch": 2.8715953307392996,
         
     | 
| 1309 | 
         
            +
                  "grad_norm": 6.693770146874348,
         
     | 
| 1310 | 
         
            +
                  "learning_rate": 1.5267686145077406e-05,
         
     | 
| 1311 | 
         
            +
                  "loss": 1.127,
         
     | 
| 1312 | 
         
            +
                  "step": 186
         
     | 
| 1313 | 
         
            +
                },
         
     | 
| 1314 | 
         
            +
                {
         
     | 
| 1315 | 
         
            +
                  "epoch": 2.88715953307393,
         
     | 
| 1316 | 
         
            +
                  "grad_norm": 18.137993374614158,
         
     | 
| 1317 | 
         
            +
                  "learning_rate": 1.5217412567246298e-05,
         
     | 
| 1318 | 
         
            +
                  "loss": 1.5315,
         
     | 
| 1319 | 
         
            +
                  "step": 187
         
     | 
| 1320 | 
         
            +
                },
         
     | 
| 1321 | 
         
            +
                {
         
     | 
| 1322 | 
         
            +
                  "epoch": 2.90272373540856,
         
     | 
| 1323 | 
         
            +
                  "grad_norm": 9.571406690209106,
         
     | 
| 1324 | 
         
            +
                  "learning_rate": 1.5166969796946087e-05,
         
     | 
| 1325 | 
         
            +
                  "loss": 1.1774,
         
     | 
| 1326 | 
         
            +
                  "step": 188
         
     | 
| 1327 | 
         
            +
                },
         
     | 
| 1328 | 
         
            +
                {
         
     | 
| 1329 | 
         
            +
                  "epoch": 2.9182879377431905,
         
     | 
| 1330 | 
         
            +
                  "grad_norm": 19.959548104529457,
         
     | 
| 1331 | 
         
            +
                  "learning_rate": 1.5116359857819635e-05,
         
     | 
| 1332 | 
         
            +
                  "loss": 1.3707,
         
     | 
| 1333 | 
         
            +
                  "step": 189
         
     | 
| 1334 | 
         
            +
                },
         
     | 
| 1335 | 
         
            +
                {
         
     | 
| 1336 | 
         
            +
                  "epoch": 2.9338521400778212,
         
     | 
| 1337 | 
         
            +
                  "grad_norm": 16.169561781876904,
         
     | 
| 1338 | 
         
            +
                  "learning_rate": 1.5065584780216225e-05,
         
     | 
| 1339 | 
         
            +
                  "loss": 1.3618,
         
     | 
| 1340 | 
         
            +
                  "step": 190
         
     | 
| 1341 | 
         
            +
                },
         
     | 
| 1342 | 
         
            +
                {
         
     | 
| 1343 | 
         
            +
                  "epoch": 2.9494163424124515,
         
     | 
| 1344 | 
         
            +
                  "grad_norm": 7.308704556972184,
         
     | 
| 1345 | 
         
            +
                  "learning_rate": 1.501464660111009e-05,
         
     | 
| 1346 | 
         
            +
                  "loss": 1.5718,
         
     | 
| 1347 | 
         
            +
                  "step": 191
         
     | 
| 1348 | 
         
            +
                },
         
     | 
| 1349 | 
         
            +
                {
         
     | 
| 1350 | 
         
            +
                  "epoch": 2.964980544747082,
         
     | 
| 1351 | 
         
            +
                  "grad_norm": 11.175947473356464,
         
     | 
| 1352 | 
         
            +
                  "learning_rate": 1.4963547364018711e-05,
         
     | 
| 1353 | 
         
            +
                  "loss": 1.1821,
         
     | 
| 1354 | 
         
            +
                  "step": 192
         
     | 
| 1355 | 
         
            +
                },
         
     | 
| 1356 | 
         
            +
                {
         
     | 
| 1357 | 
         
            +
                  "epoch": 2.980544747081712,
         
     | 
| 1358 | 
         
            +
                  "grad_norm": 6.880501518603281,
         
     | 
| 1359 | 
         
            +
                  "learning_rate": 1.4912289118920821e-05,
         
     | 
| 1360 | 
         
            +
                  "loss": 1.2976,
         
     | 
| 1361 | 
         
            +
                  "step": 193
         
     | 
| 1362 | 
         
            +
                },
         
     | 
| 1363 | 
         
            +
                {
         
     | 
| 1364 | 
         
            +
                  "epoch": 2.9961089494163424,
         
     | 
| 1365 | 
         
            +
                  "grad_norm": 16.45488939160082,
         
     | 
| 1366 | 
         
            +
                  "learning_rate": 1.4860873922174188e-05,
         
     | 
| 1367 | 
         
            +
                  "loss": 1.5251,
         
     | 
| 1368 | 
         
            +
                  "step": 194
         
     | 
| 1369 | 
         
            +
                },
         
     | 
| 1370 | 
         
            +
                {
         
     | 
| 1371 | 
         
            +
                  "epoch": 3.0,
         
     | 
| 1372 | 
         
            +
                  "grad_norm": 16.45488939160082,
         
     | 
| 1373 | 
         
            +
                  "learning_rate": 1.4809303836433086e-05,
         
     | 
| 1374 | 
         
            +
                  "loss": 0.3486,
         
     | 
| 1375 | 
         
            +
                  "step": 195
         
     | 
| 1376 | 
         
            +
                },
         
     | 
| 1377 | 
         
            +
                {
         
     | 
| 1378 | 
         
            +
                  "epoch": 3.0155642023346303,
         
     | 
| 1379 | 
         
            +
                  "grad_norm": 17.26667705122932,
         
     | 
| 1380 | 
         
            +
                  "learning_rate": 1.4757580930565569e-05,
         
     | 
| 1381 | 
         
            +
                  "loss": 1.2262,
         
     | 
| 1382 | 
         
            +
                  "step": 196
         
     | 
| 1383 | 
         
            +
                },
         
     | 
| 1384 | 
         
            +
                {
         
     | 
| 1385 | 
         
            +
                  "epoch": 3.0311284046692606,
         
     | 
| 1386 | 
         
            +
                  "grad_norm": 7.079930630802389,
         
     | 
| 1387 | 
         
            +
                  "learning_rate": 1.4705707279570476e-05,
         
     | 
| 1388 | 
         
            +
                  "loss": 1.0964,
         
     | 
| 1389 | 
         
            +
                  "step": 197
         
     | 
| 1390 | 
         
            +
                },
         
     | 
| 1391 | 
         
            +
                {
         
     | 
| 1392 | 
         
            +
                  "epoch": 3.046692607003891,
         
     | 
| 1393 | 
         
            +
                  "grad_norm": 5.9707164634211045,
         
     | 
| 1394 | 
         
            +
                  "learning_rate": 1.4653684964494163e-05,
         
     | 
| 1395 | 
         
            +
                  "loss": 1.1285,
         
     | 
| 1396 | 
         
            +
                  "step": 198
         
     | 
| 1397 | 
         
            +
                },
         
     | 
| 1398 | 
         
            +
                {
         
     | 
| 1399 | 
         
            +
                  "epoch": 3.062256809338521,
         
     | 
| 1400 | 
         
            +
                  "grad_norm": 7.374420248201497,
         
     | 
| 1401 | 
         
            +
                  "learning_rate": 1.460151607234705e-05,
         
     | 
| 1402 | 
         
            +
                  "loss": 1.1528,
         
     | 
| 1403 | 
         
            +
                  "step": 199
         
     | 
| 1404 | 
         
            +
                },
         
     | 
| 1405 | 
         
            +
                {
         
     | 
| 1406 | 
         
            +
                  "epoch": 3.077821011673152,
         
     | 
| 1407 | 
         
            +
                  "grad_norm": 7.823768857549868,
         
     | 
| 1408 | 
         
            +
                  "learning_rate": 1.4549202696019868e-05,
         
     | 
| 1409 | 
         
            +
                  "loss": 1.6089,
         
     | 
| 1410 | 
         
            +
                  "step": 200
         
     | 
| 1411 | 
         
            +
                },
         
     | 
| 1412 | 
         
            +
                {
         
     | 
| 1413 | 
         
            +
                  "epoch": 3.093385214007782,
         
     | 
| 1414 | 
         
            +
                  "grad_norm": 13.095423850316486,
         
     | 
| 1415 | 
         
            +
                  "learning_rate": 1.44967469341997e-05,
         
     | 
| 1416 | 
         
            +
                  "loss": 1.4053,
         
     | 
| 1417 | 
         
            +
                  "step": 201
         
     | 
| 1418 | 
         
            +
                },
         
     | 
| 1419 | 
         
            +
                {
         
     | 
| 1420 | 
         
            +
                  "epoch": 3.1089494163424125,
         
     | 
| 1421 | 
         
            +
                  "grad_norm": 8.57838469081796,
         
     | 
| 1422 | 
         
            +
                  "learning_rate": 1.4444150891285809e-05,
         
     | 
| 1423 | 
         
            +
                  "loss": 1.3911,
         
     | 
| 1424 | 
         
            +
                  "step": 202
         
     | 
| 1425 | 
         
            +
                },
         
     | 
| 1426 | 
         
            +
                {
         
     | 
| 1427 | 
         
            +
                  "epoch": 3.124513618677043,
         
     | 
| 1428 | 
         
            +
                  "grad_norm": 10.492752547850309,
         
     | 
| 1429 | 
         
            +
                  "learning_rate": 1.4391416677305183e-05,
         
     | 
| 1430 | 
         
            +
                  "loss": 1.7466,
         
     | 
| 1431 | 
         
            +
                  "step": 203
         
     | 
| 1432 | 
         
            +
                },
         
     | 
| 1433 | 
         
            +
                {
         
     | 
| 1434 | 
         
            +
                  "epoch": 3.140077821011673,
         
     | 
| 1435 | 
         
            +
                  "grad_norm": 12.04907785845439,
         
     | 
| 1436 | 
         
            +
                  "learning_rate": 1.4338546407827912e-05,
         
     | 
| 1437 | 
         
            +
                  "loss": 1.3069,
         
     | 
| 1438 | 
         
            +
                  "step": 204
         
     | 
| 1439 | 
         
            +
                },
         
     | 
| 1440 | 
         
            +
                {
         
     | 
| 1441 | 
         
            +
                  "epoch": 3.1556420233463034,
         
     | 
| 1442 | 
         
            +
                  "grad_norm": 9.082131491175343,
         
     | 
| 1443 | 
         
            +
                  "learning_rate": 1.4285542203882301e-05,
         
     | 
| 1444 | 
         
            +
                  "loss": 1.3909,
         
     | 
| 1445 | 
         
            +
                  "step": 205
         
     | 
| 1446 | 
         
            +
                },
         
     | 
| 1447 | 
         
            +
                {
         
     | 
| 1448 | 
         
            +
                  "epoch": 3.1712062256809337,
         
     | 
| 1449 | 
         
            +
                  "grad_norm": 7.8265612070424035,
         
     | 
| 1450 | 
         
            +
                  "learning_rate": 1.4232406191869786e-05,
         
     | 
| 1451 | 
         
            +
                  "loss": 1.1016,
         
     | 
| 1452 | 
         
            +
                  "step": 206
         
     | 
| 1453 | 
         
            +
                },
         
     | 
| 1454 | 
         
            +
                {
         
     | 
| 1455 | 
         
            +
                  "epoch": 3.1867704280155644,
         
     | 
| 1456 | 
         
            +
                  "grad_norm": 7.017081240597243,
         
     | 
| 1457 | 
         
            +
                  "learning_rate": 1.4179140503479622e-05,
         
     | 
| 1458 | 
         
            +
                  "loss": 1.3007,
         
     | 
| 1459 | 
         
            +
                  "step": 207
         
     | 
| 1460 | 
         
            +
                },
         
     | 
| 1461 | 
         
            +
                {
         
     | 
| 1462 | 
         
            +
                  "epoch": 3.2023346303501947,
         
     | 
| 1463 | 
         
            +
                  "grad_norm": 11.222861362951033,
         
     | 
| 1464 | 
         
            +
                  "learning_rate": 1.4125747275603384e-05,
         
     | 
| 1465 | 
         
            +
                  "loss": 1.4584,
         
     | 
| 1466 | 
         
            +
                  "step": 208
         
     | 
| 1467 | 
         
            +
                },
         
     | 
| 1468 | 
         
            +
                {
         
     | 
| 1469 | 
         
            +
                  "epoch": 3.217898832684825,
         
     | 
| 1470 | 
         
            +
                  "grad_norm": 8.228254518059948,
         
     | 
| 1471 | 
         
            +
                  "learning_rate": 1.4072228650249205e-05,
         
     | 
| 1472 | 
         
            +
                  "loss": 1.1437,
         
     | 
| 1473 | 
         
            +
                  "step": 209
         
     | 
| 1474 | 
         
            +
                },
         
     | 
| 1475 | 
         
            +
                {
         
     | 
| 1476 | 
         
            +
                  "epoch": 3.2334630350194553,
         
     | 
| 1477 | 
         
            +
                  "grad_norm": 5.620105281993818,
         
     | 
| 1478 | 
         
            +
                  "learning_rate": 1.4018586774455876e-05,
         
     | 
| 1479 | 
         
            +
                  "loss": 1.0801,
         
     | 
| 1480 | 
         
            +
                  "step": 210
         
     | 
| 1481 | 
         
            +
                },
         
     | 
| 1482 | 
         
            +
                {
         
     | 
| 1483 | 
         
            +
                  "epoch": 3.2490272373540856,
         
     | 
| 1484 | 
         
            +
                  "grad_norm": 8.270265614809485,
         
     | 
| 1485 | 
         
            +
                  "learning_rate": 1.3964823800206698e-05,
         
     | 
| 1486 | 
         
            +
                  "loss": 1.4172,
         
     | 
| 1487 | 
         
            +
                  "step": 211
         
     | 
| 1488 | 
         
            +
                },
         
     | 
| 1489 | 
         
            +
                {
         
     | 
| 1490 | 
         
            +
                  "epoch": 3.264591439688716,
         
     | 
| 1491 | 
         
            +
                  "grad_norm": 10.759581180311525,
         
     | 
| 1492 | 
         
            +
                  "learning_rate": 1.3910941884343144e-05,
         
     | 
| 1493 | 
         
            +
                  "loss": 1.3431,
         
     | 
| 1494 | 
         
            +
                  "step": 212
         
     | 
| 1495 | 
         
            +
                },
         
     | 
| 1496 | 
         
            +
                {
         
     | 
| 1497 | 
         
            +
                  "epoch": 3.280155642023346,
         
     | 
| 1498 | 
         
            +
                  "grad_norm": 9.256121877658135,
         
     | 
| 1499 | 
         
            +
                  "learning_rate": 1.3856943188478353e-05,
         
     | 
| 1500 | 
         
            +
                  "loss": 1.1614,
         
     | 
| 1501 | 
         
            +
                  "step": 213
         
     | 
| 1502 | 
         
            +
                },
         
     | 
| 1503 | 
         
            +
                {
         
     | 
| 1504 | 
         
            +
                  "epoch": 3.2957198443579765,
         
     | 
| 1505 | 
         
            +
                  "grad_norm": 8.278774263244427,
         
     | 
| 1506 | 
         
            +
                  "learning_rate": 1.3802829878910387e-05,
         
     | 
| 1507 | 
         
            +
                  "loss": 1.5056,
         
     | 
| 1508 | 
         
            +
                  "step": 214
         
     | 
| 1509 | 
         
            +
                },
         
     | 
| 1510 | 
         
            +
                {
         
     | 
| 1511 | 
         
            +
                  "epoch": 3.311284046692607,
         
     | 
| 1512 | 
         
            +
                  "grad_norm": 7.988122466692388,
         
     | 
| 1513 | 
         
            +
                  "learning_rate": 1.3748604126535335e-05,
         
     | 
| 1514 | 
         
            +
                  "loss": 1.1658,
         
     | 
| 1515 | 
         
            +
                  "step": 215
         
     | 
| 1516 | 
         
            +
                },
         
     | 
| 1517 | 
         
            +
                {
         
     | 
| 1518 | 
         
            +
                  "epoch": 3.3268482490272375,
         
     | 
| 1519 | 
         
            +
                  "grad_norm": 25.69960791189383,
         
     | 
| 1520 | 
         
            +
                  "learning_rate": 1.3694268106760225e-05,
         
     | 
| 1521 | 
         
            +
                  "loss": 1.5151,
         
     | 
| 1522 | 
         
            +
                  "step": 216
         
     | 
| 1523 | 
         
            +
                },
         
     | 
| 1524 | 
         
            +
                {
         
     | 
| 1525 | 
         
            +
                  "epoch": 3.342412451361868,
         
     | 
| 1526 | 
         
            +
                  "grad_norm": 6.716460926595037,
         
     | 
| 1527 | 
         
            +
                  "learning_rate": 1.3639823999415744e-05,
         
     | 
| 1528 | 
         
            +
                  "loss": 1.2914,
         
     | 
| 1529 | 
         
            +
                  "step": 217
         
     | 
| 1530 | 
         
            +
                },
         
     | 
| 1531 | 
         
            +
                {
         
     | 
| 1532 | 
         
            +
                  "epoch": 3.357976653696498,
         
     | 
| 1533 | 
         
            +
                  "grad_norm": 16.08954441850179,
         
     | 
| 1534 | 
         
            +
                  "learning_rate": 1.3585273988668804e-05,
         
     | 
| 1535 | 
         
            +
                  "loss": 1.2714,
         
     | 
| 1536 | 
         
            +
                  "step": 218
         
     | 
| 1537 | 
         
            +
                },
         
     | 
| 1538 | 
         
            +
                {
         
     | 
| 1539 | 
         
            +
                  "epoch": 3.3735408560311284,
         
     | 
| 1540 | 
         
            +
                  "grad_norm": 7.277025513470879,
         
     | 
| 1541 | 
         
            +
                  "learning_rate": 1.3530620262934892e-05,
         
     | 
| 1542 | 
         
            +
                  "loss": 1.3116,
         
     | 
| 1543 | 
         
            +
                  "step": 219
         
     | 
| 1544 | 
         
            +
                },
         
     | 
| 1545 | 
         
            +
                {
         
     | 
| 1546 | 
         
            +
                  "epoch": 3.3891050583657587,
         
     | 
| 1547 | 
         
            +
                  "grad_norm": 10.39394733050087,
         
     | 
| 1548 | 
         
            +
                  "learning_rate": 1.3475865014790303e-05,
         
     | 
| 1549 | 
         
            +
                  "loss": 1.3044,
         
     | 
| 1550 | 
         
            +
                  "step": 220
         
     | 
| 1551 | 
         
            +
                },
         
     | 
| 1552 | 
         
            +
                {
         
     | 
| 1553 | 
         
            +
                  "epoch": 3.404669260700389,
         
     | 
| 1554 | 
         
            +
                  "grad_norm": 6.793016895617423,
         
     | 
| 1555 | 
         
            +
                  "learning_rate": 1.342101044088416e-05,
         
     | 
| 1556 | 
         
            +
                  "loss": 1.6471,
         
     | 
| 1557 | 
         
            +
                  "step": 221
         
     | 
| 1558 | 
         
            +
                },
         
     | 
| 1559 | 
         
            +
                {
         
     | 
| 1560 | 
         
            +
                  "epoch": 3.4202334630350193,
         
     | 
| 1561 | 
         
            +
                  "grad_norm": 8.307309122049785,
         
     | 
| 1562 | 
         
            +
                  "learning_rate": 1.3366058741850302e-05,
         
     | 
| 1563 | 
         
            +
                  "loss": 1.1521,
         
     | 
| 1564 | 
         
            +
                  "step": 222
         
     | 
| 1565 | 
         
            +
                },
         
     | 
| 1566 | 
         
            +
                {
         
     | 
| 1567 | 
         
            +
                  "epoch": 3.43579766536965,
         
     | 
| 1568 | 
         
            +
                  "grad_norm": 11.961031173730973,
         
     | 
| 1569 | 
         
            +
                  "learning_rate": 1.3311012122218995e-05,
         
     | 
| 1570 | 
         
            +
                  "loss": 1.4236,
         
     | 
| 1571 | 
         
            +
                  "step": 223
         
     | 
| 1572 | 
         
            +
                },
         
     | 
| 1573 | 
         
            +
                {
         
     | 
| 1574 | 
         
            +
                  "epoch": 3.4513618677042803,
         
     | 
| 1575 | 
         
            +
                  "grad_norm": 12.085462142899722,
         
     | 
| 1576 | 
         
            +
                  "learning_rate": 1.3255872790328485e-05,
         
     | 
| 1577 | 
         
            +
                  "loss": 1.4304,
         
     | 
| 1578 | 
         
            +
                  "step": 224
         
     | 
| 1579 | 
         
            +
                },
         
     | 
| 1580 | 
         
            +
                {
         
     | 
| 1581 | 
         
            +
                  "epoch": 3.4669260700389106,
         
     | 
| 1582 | 
         
            +
                  "grad_norm": 12.043766601436674,
         
     | 
| 1583 | 
         
            +
                  "learning_rate": 1.320064295823642e-05,
         
     | 
| 1584 | 
         
            +
                  "loss": 1.105,
         
     | 
| 1585 | 
         
            +
                  "step": 225
         
     | 
| 1586 | 
         
            +
                },
         
     | 
| 1587 | 
         
            +
                {
         
     | 
| 1588 | 
         
            +
                  "epoch": 3.482490272373541,
         
     | 
| 1589 | 
         
            +
                  "grad_norm": 6.830210730411899,
         
     | 
| 1590 | 
         
            +
                  "learning_rate": 1.3145324841631093e-05,
         
     | 
| 1591 | 
         
            +
                  "loss": 1.0992,
         
     | 
| 1592 | 
         
            +
                  "step": 226
         
     | 
| 1593 | 
         
            +
                },
         
     | 
| 1594 | 
         
            +
                {
         
     | 
| 1595 | 
         
            +
                  "epoch": 3.498054474708171,
         
     | 
| 1596 | 
         
            +
                  "grad_norm": 5.183807189599878,
         
     | 
| 1597 | 
         
            +
                  "learning_rate": 1.3089920659742561e-05,
         
     | 
| 1598 | 
         
            +
                  "loss": 1.1104,
         
     | 
| 1599 | 
         
            +
                  "step": 227
         
     | 
| 1600 | 
         
            +
                },
         
     | 
| 1601 | 
         
            +
                {
         
     | 
| 1602 | 
         
            +
                  "epoch": 3.5136186770428015,
         
     | 
| 1603 | 
         
            +
                  "grad_norm": 8.173248197932283,
         
     | 
| 1604 | 
         
            +
                  "learning_rate": 1.3034432635253615e-05,
         
     | 
| 1605 | 
         
            +
                  "loss": 1.3416,
         
     | 
| 1606 | 
         
            +
                  "step": 228
         
     | 
| 1607 | 
         
            +
                },
         
     | 
| 1608 | 
         
            +
                {
         
     | 
| 1609 | 
         
            +
                  "epoch": 3.529182879377432,
         
     | 
| 1610 | 
         
            +
                  "grad_norm": 6.840510399122009,
         
     | 
| 1611 | 
         
            +
                  "learning_rate": 1.2978862994210609e-05,
         
     | 
| 1612 | 
         
            +
                  "loss": 1.1301,
         
     | 
| 1613 | 
         
            +
                  "step": 229
         
     | 
| 1614 | 
         
            +
                },
         
     | 
| 1615 | 
         
            +
                {
         
     | 
| 1616 | 
         
            +
                  "epoch": 3.544747081712062,
         
     | 
| 1617 | 
         
            +
                  "grad_norm": 6.795335624835114,
         
     | 
| 1618 | 
         
            +
                  "learning_rate": 1.2923213965934158e-05,
         
     | 
| 1619 | 
         
            +
                  "loss": 1.1824,
         
     | 
| 1620 | 
         
            +
                  "step": 230
         
     | 
| 1621 | 
         
            +
                },
         
     | 
| 1622 | 
         
            +
                {
         
     | 
| 1623 | 
         
            +
                  "epoch": 3.5603112840466924,
         
     | 
| 1624 | 
         
            +
                  "grad_norm": 12.487628895610484,
         
     | 
| 1625 | 
         
            +
                  "learning_rate": 1.2867487782929702e-05,
         
     | 
| 1626 | 
         
            +
                  "loss": 1.4,
         
     | 
| 1627 | 
         
            +
                  "step": 231
         
     | 
| 1628 | 
         
            +
                },
         
     | 
| 1629 | 
         
            +
                {
         
     | 
| 1630 | 
         
            +
                  "epoch": 3.5758754863813227,
         
     | 
| 1631 | 
         
            +
                  "grad_norm": 7.498953182696051,
         
     | 
| 1632 | 
         
            +
                  "learning_rate": 1.2811686680797942e-05,
         
     | 
| 1633 | 
         
            +
                  "loss": 1.0117,
         
     | 
| 1634 | 
         
            +
                  "step": 232
         
     | 
| 1635 | 
         
            +
                },
         
     | 
| 1636 | 
         
            +
                {
         
     | 
| 1637 | 
         
            +
                  "epoch": 3.5914396887159534,
         
     | 
| 1638 | 
         
            +
                  "grad_norm": 11.599970960534753,
         
     | 
| 1639 | 
         
            +
                  "learning_rate": 1.2755812898145157e-05,
         
     | 
| 1640 | 
         
            +
                  "loss": 1.457,
         
     | 
| 1641 | 
         
            +
                  "step": 233
         
     | 
| 1642 | 
         
            +
                },
         
     | 
| 1643 | 
         
            +
                {
         
     | 
| 1644 | 
         
            +
                  "epoch": 3.6070038910505837,
         
     | 
| 1645 | 
         
            +
                  "grad_norm": 10.728915546115905,
         
     | 
| 1646 | 
         
            +
                  "learning_rate": 1.269986867649339e-05,
         
     | 
| 1647 | 
         
            +
                  "loss": 1.0275,
         
     | 
| 1648 | 
         
            +
                  "step": 234
         
     | 
| 1649 | 
         
            +
                },
         
     | 
| 1650 | 
         
            +
                {
         
     | 
| 1651 | 
         
            +
                  "epoch": 3.622568093385214,
         
     | 
| 1652 | 
         
            +
                  "grad_norm": 7.212471068564826,
         
     | 
| 1653 | 
         
            +
                  "learning_rate": 1.2643856260190533e-05,
         
     | 
| 1654 | 
         
            +
                  "loss": 1.2426,
         
     | 
| 1655 | 
         
            +
                  "step": 235
         
     | 
| 1656 | 
         
            +
                },
         
     | 
| 1657 | 
         
            +
                {
         
     | 
| 1658 | 
         
            +
                  "epoch": 3.6381322957198443,
         
     | 
| 1659 | 
         
            +
                  "grad_norm": 7.257310852456952,
         
     | 
| 1660 | 
         
            +
                  "learning_rate": 1.2587777896320279e-05,
         
     | 
| 1661 | 
         
            +
                  "loss": 1.1293,
         
     | 
| 1662 | 
         
            +
                  "step": 236
         
     | 
| 1663 | 
         
            +
                },
         
     | 
| 1664 | 
         
            +
                {
         
     | 
| 1665 | 
         
            +
                  "epoch": 3.6536964980544746,
         
     | 
| 1666 | 
         
            +
                  "grad_norm": 5.41922031825349,
         
     | 
| 1667 | 
         
            +
                  "learning_rate": 1.2531635834611981e-05,
         
     | 
| 1668 | 
         
            +
                  "loss": 1.2113,
         
     | 
| 1669 | 
         
            +
                  "step": 237
         
     | 
| 1670 | 
         
            +
                },
         
     | 
| 1671 | 
         
            +
                {
         
     | 
| 1672 | 
         
            +
                  "epoch": 3.669260700389105,
         
     | 
| 1673 | 
         
            +
                  "grad_norm": 6.480868678608291,
         
     | 
| 1674 | 
         
            +
                  "learning_rate": 1.2475432327350396e-05,
         
     | 
| 1675 | 
         
            +
                  "loss": 1.2706,
         
     | 
| 1676 | 
         
            +
                  "step": 238
         
     | 
| 1677 | 
         
            +
                },
         
     | 
| 1678 | 
         
            +
                {
         
     | 
| 1679 | 
         
            +
                  "epoch": 3.6848249027237356,
         
     | 
| 1680 | 
         
            +
                  "grad_norm": 6.80783495792981,
         
     | 
| 1681 | 
         
            +
                  "learning_rate": 1.2419169629285335e-05,
         
     | 
| 1682 | 
         
            +
                  "loss": 1.0698,
         
     | 
| 1683 | 
         
            +
                  "step": 239
         
     | 
| 1684 | 
         
            +
                },
         
     | 
| 1685 | 
         
            +
                {
         
     | 
| 1686 | 
         
            +
                  "epoch": 3.700389105058366,
         
     | 
| 1687 | 
         
            +
                  "grad_norm": 7.234690686703731,
         
     | 
| 1688 | 
         
            +
                  "learning_rate": 1.236284999754119e-05,
         
     | 
| 1689 | 
         
            +
                  "loss": 1.4999,
         
     | 
| 1690 | 
         
            +
                  "step": 240
         
     | 
| 1691 | 
         
            +
                },
         
     | 
| 1692 | 
         
            +
                {
         
     | 
| 1693 | 
         
            +
                  "epoch": 3.7159533073929962,
         
     | 
| 1694 | 
         
            +
                  "grad_norm": 9.331788321366725,
         
     | 
| 1695 | 
         
            +
                  "learning_rate": 1.2306475691526407e-05,
         
     | 
| 1696 | 
         
            +
                  "loss": 1.0748,
         
     | 
| 1697 | 
         
            +
                  "step": 241
         
     | 
| 1698 | 
         
            +
                },
         
     | 
| 1699 | 
         
            +
                {
         
     | 
| 1700 | 
         
            +
                  "epoch": 3.7315175097276265,
         
     | 
| 1701 | 
         
            +
                  "grad_norm": 7.024835954207243,
         
     | 
| 1702 | 
         
            +
                  "learning_rate": 1.2250048972842823e-05,
         
     | 
| 1703 | 
         
            +
                  "loss": 1.241,
         
     | 
| 1704 | 
         
            +
                  "step": 242
         
     | 
| 1705 | 
         
            +
                },
         
     | 
| 1706 | 
         
            +
                {
         
     | 
| 1707 | 
         
            +
                  "epoch": 3.747081712062257,
         
     | 
| 1708 | 
         
            +
                  "grad_norm": 8.381718396842675,
         
     | 
| 1709 | 
         
            +
                  "learning_rate": 1.2193572105194953e-05,
         
     | 
| 1710 | 
         
            +
                  "loss": 1.4368,
         
     | 
| 1711 | 
         
            +
                  "step": 243
         
     | 
| 1712 | 
         
            +
                },
         
     | 
| 1713 | 
         
            +
                {
         
     | 
| 1714 | 
         
            +
                  "epoch": 3.762645914396887,
         
     | 
| 1715 | 
         
            +
                  "grad_norm": 8.467657085467351,
         
     | 
| 1716 | 
         
            +
                  "learning_rate": 1.2137047354299165e-05,
         
     | 
| 1717 | 
         
            +
                  "loss": 1.1316,
         
     | 
| 1718 | 
         
            +
                  "step": 244
         
     | 
| 1719 | 
         
            +
                },
         
     | 
| 1720 | 
         
            +
                {
         
     | 
| 1721 | 
         
            +
                  "epoch": 3.7782101167315174,
         
     | 
| 1722 | 
         
            +
                  "grad_norm": 6.56413902354136,
         
     | 
| 1723 | 
         
            +
                  "learning_rate": 1.2080476987792786e-05,
         
     | 
| 1724 | 
         
            +
                  "loss": 1.2924,
         
     | 
| 1725 | 
         
            +
                  "step": 245
         
     | 
| 1726 | 
         
            +
                },
         
     | 
| 1727 | 
         
            +
                {
         
     | 
| 1728 | 
         
            +
                  "epoch": 3.7937743190661477,
         
     | 
| 1729 | 
         
            +
                  "grad_norm": 11.457150627414464,
         
     | 
| 1730 | 
         
            +
                  "learning_rate": 1.2023863275143138e-05,
         
     | 
| 1731 | 
         
            +
                  "loss": 1.1807,
         
     | 
| 1732 | 
         
            +
                  "step": 246
         
     | 
| 1733 | 
         
            +
                },
         
     | 
| 1734 | 
         
            +
                {
         
     | 
| 1735 | 
         
            +
                  "epoch": 3.809338521400778,
         
     | 
| 1736 | 
         
            +
                  "grad_norm": 11.784032118298212,
         
     | 
| 1737 | 
         
            +
                  "learning_rate": 1.1967208487556477e-05,
         
     | 
| 1738 | 
         
            +
                  "loss": 1.3601,
         
     | 
| 1739 | 
         
            +
                  "step": 247
         
     | 
| 1740 | 
         
            +
                },
         
     | 
| 1741 | 
         
            +
                {
         
     | 
| 1742 | 
         
            +
                  "epoch": 3.8249027237354083,
         
     | 
| 1743 | 
         
            +
                  "grad_norm": 7.618595171387596,
         
     | 
| 1744 | 
         
            +
                  "learning_rate": 1.1910514897886892e-05,
         
     | 
| 1745 | 
         
            +
                  "loss": 1.0757,
         
     | 
| 1746 | 
         
            +
                  "step": 248
         
     | 
| 1747 | 
         
            +
                },
         
     | 
| 1748 | 
         
            +
                {
         
     | 
| 1749 | 
         
            +
                  "epoch": 3.840466926070039,
         
     | 
| 1750 | 
         
            +
                  "grad_norm": 15.913563034938784,
         
     | 
| 1751 | 
         
            +
                  "learning_rate": 1.1853784780545123e-05,
         
     | 
| 1752 | 
         
            +
                  "loss": 1.381,
         
     | 
| 1753 | 
         
            +
                  "step": 249
         
     | 
| 1754 | 
         
            +
                },
         
     | 
| 1755 | 
         
    {
      "epoch": 3.8560311284046693,
      "grad_norm": 9.448026664890813,
      "learning_rate": 1.1797020411407303e-05,
      "loss": 1.1996,
      "step": 250
    },
    {
      "epoch": 3.8715953307392996,
      "grad_norm": 9.36740858593225,
      "learning_rate": 1.1740224067723676e-05,
      "loss": 1.3333,
      "step": 251
    },
    {
      "epoch": 3.88715953307393,
      "grad_norm": 6.202904532995713,
      "learning_rate": 1.1683398028027218e-05,
      "loss": 1.0989,
      "step": 252
    },
    {
      "epoch": 3.90272373540856,
      "grad_norm": 6.807434814034836,
      "learning_rate": 1.162654457204224e-05,
      "loss": 1.0997,
      "step": 253
    },
    {
      "epoch": 3.9182879377431905,
      "grad_norm": 7.474757873337292,
      "learning_rate": 1.1569665980592934e-05,
      "loss": 1.3777,
      "step": 254
    },
    {
      "epoch": 3.9338521400778212,
      "grad_norm": 10.348844472235802,
      "learning_rate": 1.1512764535511862e-05,
      "loss": 1.4729,
      "step": 255
    },
    {
      "epoch": 3.9494163424124515,
      "grad_norm": 9.595077808457091,
      "learning_rate": 1.1455842519548417e-05,
      "loss": 1.1649,
      "step": 256
    },
    {
      "epoch": 3.964980544747082,
      "grad_norm": 12.298828537750817,
      "learning_rate": 1.139890221627725e-05,
      "loss": 1.1849,
      "step": 257
    },
    {
      "epoch": 3.980544747081712,
      "grad_norm": 9.042727283207734,
      "learning_rate": 1.1341945910006656e-05,
      "loss": 1.3065,
      "step": 258
    },
    {
      "epoch": 3.9961089494163424,
      "grad_norm": 7.530830029119465,
      "learning_rate": 1.1284975885686926e-05,
      "loss": 1.2184,
      "step": 259
    },
    {
      "epoch": 4.0,
      "grad_norm": 7.889222687866491,
      "learning_rate": 1.1227994428818692e-05,
      "loss": 0.4148,
      "step": 260
    },
    {
      "epoch": 4.01556420233463,
      "grad_norm": 7.442531502305794,
      "learning_rate": 1.1171003825361233e-05,
      "loss": 1.2908,
      "step": 261
    },
    {
      "epoch": 4.031128404669261,
      "grad_norm": 5.5861970223083945,
      "learning_rate": 1.1114006361640763e-05,
      "loss": 1.1309,
      "step": 262
    },
    {
      "epoch": 4.046692607003891,
      "grad_norm": 7.19625969261882,
      "learning_rate": 1.105700432425871e-05,
      "loss": 1.149,
      "step": 263
    },
    {
      "epoch": 4.062256809338521,
      "grad_norm": 7.319827903412528,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 1.2781,
      "step": 264
    },
    {
      "epoch": 4.0778210116731515,
      "grad_norm": 8.579128780187071,
      "learning_rate": 1.094299567574129e-05,
      "loss": 1.3237,
      "step": 265
    },
    {
      "epoch": 4.093385214007782,
      "grad_norm": 6.407050357928513,
      "learning_rate": 1.0885993638359242e-05,
      "loss": 1.4427,
      "step": 266
    },
    {
      "epoch": 4.108949416342412,
      "grad_norm": 8.132256321978414,
      "learning_rate": 1.0828996174638768e-05,
      "loss": 1.394,
      "step": 267
    },
    {
      "epoch": 4.124513618677042,
      "grad_norm": 6.3812196369738645,
      "learning_rate": 1.0772005571181313e-05,
      "loss": 1.1583,
      "step": 268
    },
    {
      "epoch": 4.1400778210116735,
      "grad_norm": 13.85707637253458,
      "learning_rate": 1.0715024114313077e-05,
      "loss": 1.4429,
      "step": 269
    },
    {
      "epoch": 4.155642023346304,
      "grad_norm": 21.528973223922698,
      "learning_rate": 1.0658054089993349e-05,
      "loss": 1.4796,
      "step": 270
    },
    {
      "epoch": 4.171206225680934,
      "grad_norm": 6.331202799716651,
      "learning_rate": 1.0601097783722751e-05,
      "loss": 1.2535,
      "step": 271
    },
    {
      "epoch": 4.186770428015564,
      "grad_norm": 6.192886059438914,
      "learning_rate": 1.0544157480451586e-05,
      "loss": 1.365,
      "step": 272
    },
    {
      "epoch": 4.202334630350195,
      "grad_norm": 9.092962942972557,
      "learning_rate": 1.048723546448814e-05,
      "loss": 1.2418,
      "step": 273
    },
    {
      "epoch": 4.217898832684825,
      "grad_norm": 9.088713897338243,
      "learning_rate": 1.0430334019407069e-05,
      "loss": 1.5284,
      "step": 274
    },
    {
      "epoch": 4.233463035019455,
      "grad_norm": 7.935649176333802,
      "learning_rate": 1.0373455427957762e-05,
      "loss": 1.1285,
      "step": 275
    },
    {
      "epoch": 4.249027237354086,
      "grad_norm": 6.747077137812801,
      "learning_rate": 1.0316601971972787e-05,
      "loss": 1.1133,
      "step": 276
    },
    {
      "epoch": 4.264591439688716,
      "grad_norm": 6.190409945190735,
      "learning_rate": 1.0259775932276325e-05,
      "loss": 1.309,
      "step": 277
    },
    {
      "epoch": 4.280155642023346,
      "grad_norm": 7.659504834977579,
      "learning_rate": 1.0202979588592702e-05,
      "loss": 1.134,
      "step": 278
    },
    {
      "epoch": 4.2957198443579765,
      "grad_norm": 6.035623878393655,
      "learning_rate": 1.0146215219454882e-05,
      "loss": 1.1742,
      "step": 279
    },
    {
      "epoch": 4.311284046692607,
      "grad_norm": 14.743602784497947,
      "learning_rate": 1.0089485102113113e-05,
      "loss": 1.2349,
      "step": 280
    },
    {
      "epoch": 4.326848249027237,
      "grad_norm": 34.32059780452938,
      "learning_rate": 1.0032791512443527e-05,
      "loss": 1.1312,
      "step": 281
    },
    {
      "epoch": 4.342412451361867,
      "grad_norm": 6.537432858619071,
      "learning_rate": 9.976136724856869e-06,
      "loss": 1.2261,
      "step": 282
    },
    {
      "epoch": 4.357976653696498,
      "grad_norm": 6.805760395487357,
      "learning_rate": 9.919523012207217e-06,
      "loss": 1.2109,
      "step": 283
    },
    {
      "epoch": 4.373540856031129,
      "grad_norm": 8.577713820915738,
      "learning_rate": 9.862952645700841e-06,
      "loss": 1.4719,
      "step": 284
    },
    {
      "epoch": 4.389105058365759,
      "grad_norm": 5.490284251492559,
      "learning_rate": 9.806427894805048e-06,
      "loss": 1.312,
      "step": 285
    },
    {
      "epoch": 4.404669260700389,
      "grad_norm": 8.327345481649647,
      "learning_rate": 9.74995102715718e-06,
      "loss": 1.2461,
      "step": 286
    },
    {
      "epoch": 4.42023346303502,
      "grad_norm": 6.111747370932479,
      "learning_rate": 9.693524308473596e-06,
      "loss": 1.3926,
      "step": 287
    },
    {
      "epoch": 4.43579766536965,
      "grad_norm": 8.141202180806642,
      "learning_rate": 9.637150002458813e-06,
      "loss": 1.2008,
      "step": 288
    },
    {
      "epoch": 4.45136186770428,
      "grad_norm": 9.12454930317004,
      "learning_rate": 9.58083037071467e-06,
      "loss": 1.095,
      "step": 289
    },
    {
      "epoch": 4.466926070038911,
      "grad_norm": 8.109583837297146,
      "learning_rate": 9.524567672649606e-06,
      "loss": 1.1697,
      "step": 290
    },
    {
      "epoch": 4.482490272373541,
      "grad_norm": 6.940093378941827,
      "learning_rate": 9.468364165388022e-06,
      "loss": 1.5673,
      "step": 291
    },
    {
      "epoch": 4.498054474708171,
      "grad_norm": 8.029108149503637,
      "learning_rate": 9.412222103679728e-06,
      "loss": 1.4509,
      "step": 292
    },
    {
      "epoch": 4.5136186770428015,
      "grad_norm": 7.3797751032553345,
      "learning_rate": 9.356143739809472e-06,
      "loss": 1.1467,
      "step": 293
    },
    {
      "epoch": 4.529182879377432,
      "grad_norm": 11.435244944138505,
      "learning_rate": 9.300131323506617e-06,
      "loss": 1.1765,
      "step": 294
    },
    {
      "epoch": 4.544747081712062,
      "grad_norm": 9.332357406857541,
      "learning_rate": 9.244187101854846e-06,
      "loss": 1.2847,
      "step": 295
    },
    {
      "epoch": 4.560311284046692,
      "grad_norm": 7.009210489706155,
      "learning_rate": 9.188313319202057e-06,
      "loss": 1.3632,
      "step": 296
    },
    {
      "epoch": 4.575875486381323,
      "grad_norm": 8.623706053802827,
      "learning_rate": 9.132512217070301e-06,
      "loss": 1.3002,
      "step": 297
    },
    {
      "epoch": 4.591439688715953,
      "grad_norm": 7.10477639818194,
      "learning_rate": 9.076786034065843e-06,
      "loss": 1.2559,
      "step": 298
    },
    {
      "epoch": 4.607003891050583,
      "grad_norm": 16.24344498357353,
      "learning_rate": 9.021137005789393e-06,
      "loss": 1.3538,
      "step": 299
    },
    {
      "epoch": 4.622568093385214,
      "grad_norm": 5.84525864201727,
      "learning_rate": 8.965567364746388e-06,
      "loss": 1.2,
      "step": 300
    },
    {
      "epoch": 4.638132295719844,
      "grad_norm": 14.06325953577117,
      "learning_rate": 8.910079340257442e-06,
      "loss": 1.1461,
      "step": 301
    },
    {
      "epoch": 4.653696498054475,
      "grad_norm": 7.743168891211479,
      "learning_rate": 8.854675158368908e-06,
      "loss": 1.3149,
      "step": 302
    },
    {
      "epoch": 4.669260700389105,
      "grad_norm": 5.646503833678955,
      "learning_rate": 8.799357041763583e-06,
      "loss": 1.1332,
      "step": 303
    },
    {
      "epoch": 4.684824902723736,
      "grad_norm": 7.846526869237445,
      "learning_rate": 8.744127209671516e-06,
      "loss": 1.2928,
      "step": 304
    },
    {
      "epoch": 4.700389105058366,
      "grad_norm": 24.312874392544217,
      "learning_rate": 8.688987877781008e-06,
      "loss": 1.4022,
      "step": 305
    },
    {
      "epoch": 4.715953307392996,
      "grad_norm": 11.006982390019688,
      "learning_rate": 8.633941258149699e-06,
      "loss": 1.6169,
      "step": 306
    },
    {
      "epoch": 4.7315175097276265,
      "grad_norm": 8.161038346958115,
      "learning_rate": 8.578989559115842e-06,
      "loss": 1.1205,
      "step": 307
    },
    {
      "epoch": 4.747081712062257,
      "grad_norm": 10.360221596629799,
      "learning_rate": 8.524134985209698e-06,
      "loss": 1.1426,
      "step": 308
    },
    {
      "epoch": 4.762645914396887,
      "grad_norm": 8.086983191969743,
      "learning_rate": 8.46937973706511e-06,
      "loss": 1.1742,
      "step": 309
    },
    {
      "epoch": 4.778210116731517,
      "grad_norm": 7.93821224397039,
      "learning_rate": 8.414726011331197e-06,
      "loss": 1.3003,
      "step": 310
    },
    {
      "epoch": 4.793774319066148,
      "grad_norm": 6.8964113141193195,
      "learning_rate": 8.360176000584257e-06,
      "loss": 1.3921,
      "step": 311
    },
    {
      "epoch": 4.809338521400778,
      "grad_norm": 9.541918636824354,
      "learning_rate": 8.30573189323978e-06,
      "loss": 1.4406,
      "step": 312
    },
    {
      "epoch": 4.824902723735408,
      "grad_norm": 9.88288044965242,
      "learning_rate": 8.251395873464671e-06,
      "loss": 1.3104,
      "step": 313
    },
    {
      "epoch": 4.840466926070039,
      "grad_norm": 7.034997485501755,
      "learning_rate": 8.197170121089617e-06,
      "loss": 1.4781,
      "step": 314
    },
    {
      "epoch": 4.856031128404669,
      "grad_norm": 7.024182095026262,
      "learning_rate": 8.143056811521653e-06,
      "loss": 1.0645,
      "step": 315
    },
    {
      "epoch": 4.8715953307393,
      "grad_norm": 6.763562775528538,
      "learning_rate": 8.089058115656859e-06,
      "loss": 1.1622,
      "step": 316
    },
    {
      "epoch": 4.88715953307393,
      "grad_norm": 22.915236408148893,
      "learning_rate": 8.035176199793309e-06,
      "loss": 1.2201,
      "step": 317
    },
    {
      "epoch": 4.902723735408561,
      "grad_norm": 22.9486570021447,
      "learning_rate": 7.981413225544128e-06,
      "loss": 1.3326,
      "step": 318
    },
    {
      "epoch": 4.918287937743191,
      "grad_norm": 7.766492426098674,
      "learning_rate": 7.9277713497508e-06,
      "loss": 1.3444,
      "step": 319
    },
    {
      "epoch": 4.933852140077821,
      "grad_norm": 8.922409990504782,
      "learning_rate": 7.87425272439662e-06,
      "loss": 1.2533,
      "step": 320
    },
    {
      "epoch": 4.9494163424124515,
      "grad_norm": 10.120435136565488,
      "learning_rate": 7.82085949652038e-06,
      "loss": 1.4161,
      "step": 321
    },
    {
      "epoch": 4.964980544747082,
      "grad_norm": 11.585619162092177,
      "learning_rate": 7.767593808130216e-06,
      "loss": 1.408,
      "step": 322
    },
    {
      "epoch": 4.980544747081712,
      "grad_norm": 8.068469524272025,
      "learning_rate": 7.714457796117705e-06,
      "loss": 1.175,
      "step": 323
    },
    {
      "epoch": 4.996108949416342,
      "grad_norm": 6.472014606610803,
      "learning_rate": 7.661453592172093e-06,
      "loss": 1.356,
      "step": 324
    },
    {
      "epoch": 5.0,
      "grad_norm": 6.472014606610803,
      "learning_rate": 7.60858332269482e-06,
      "loss": 0.4053,
      "step": 325
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 512,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 8,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 176455044300800.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
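The `log_history` entries above follow the standard Hugging Face `Trainer` logging format: one record per logging step carrying `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step`. As a minimal sketch of how this file can be consumed once the checkpoint is downloaded (the local path `trainer_state.json` is assumed), the loss curve can be extracted with the standard library alone:

```python
import json

# Minimal sketch: read the training log out of trainer_state.json.
# Assumes the file has been downloaded to the current directory.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each record carries epoch, grad_norm, learning_rate, loss and step,
# exactly as in the entries listed above.
for record in state["log_history"]:
    print(record["step"], record["epoch"], record["loss"])
```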
    	
        training_args.bin
    ADDED
    
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:55a4adaab8848e3bd314a2a5b4b72c4727f222798a39a203c1916539b3a3cc70
size 8120
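`training_args.bin` is stored as a Git LFS pointer here; the underlying 8 kB file is the pickled `TrainingArguments` object that `Trainer` writes with `torch.save`. A minimal sketch for inspecting it, assuming `torch` and `transformers` are installed locally so the object can be unpickled:

```python
import torch

# Minimal sketch: unpickle the saved TrainingArguments.
# weights_only=False is needed on recent PyTorch versions because this is an
# arbitrary pickled object, not a tensor state dict.
args = torch.load("training_args.bin", weights_only=False)

print(args.num_train_epochs)
print(args.per_device_train_batch_size)
print(args.learning_rate)
```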
    	
        zero_to_fp32.py
    ADDED
    
@@ -0,0 +1,760 @@
| 1 | 
         
            +
            #!/usr/bin/env python
         
     | 
| 2 | 
         
            +
             
     | 
| 3 | 
         
            +
            # Copyright (c) Microsoft Corporation.
         
     | 
| 4 | 
         
            +
            # SPDX-License-Identifier: Apache-2.0
         
     | 
| 5 | 
         
            +
             
     | 
| 6 | 
         
            +
            # DeepSpeed Team
         
     | 
| 7 | 
         
            +
             
     | 
| 8 | 
         
            +
             # This script extracts fp32 consolidated weights from ZeRO 1, 2 and 3 DeepSpeed checkpoints. It gets
         
     | 
| 9 | 
         
            +
            # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
         
     | 
| 10 | 
         
            +
            # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
         
     | 
| 11 | 
         
            +
            # application.
         
     | 
| 12 | 
         
            +
            #
         
     | 
| 13 | 
         
            +
            # example:
         
     | 
| 14 | 
         
            +
            #   python zero_to_fp32.py . output_dir/
         
     | 
| 15 | 
         
            +
            #   or
         
     | 
| 16 | 
         
            +
            #   python zero_to_fp32.py . output_dir/ --safe_serialization
         
     | 
| 17 | 
         
            +
             
     | 
| 18 | 
         
            +
            import argparse
         
     | 
| 19 | 
         
            +
            import torch
         
     | 
| 20 | 
         
            +
            import glob
         
     | 
| 21 | 
         
            +
            import math
         
     | 
| 22 | 
         
            +
            import os
         
     | 
| 23 | 
         
            +
            import re
         
     | 
| 24 | 
         
            +
            import gc
         
     | 
| 25 | 
         
            +
            import json
         
     | 
| 26 | 
         
            +
            import numpy as np
         
     | 
| 27 | 
         
            +
            from tqdm import tqdm
         
     | 
| 28 | 
         
            +
            from collections import OrderedDict
         
     | 
| 29 | 
         
            +
            from dataclasses import dataclass
         
     | 
| 30 | 
         
            +
             
     | 
| 31 | 
         
            +
            # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
         
     | 
| 32 | 
         
            +
            # DeepSpeed data structures it has to be available in the current python environment.
         
     | 
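             # (i.e. the deepspeed package must be installed, typically via `pip install deepspeed`)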
| 33 | 
         
            +
            from deepspeed.utils import logger
         
     | 
| 34 | 
         
            +
            from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
         
     | 
| 35 | 
         
            +
                                                        FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
         
     | 
| 36 | 
         
            +
                                                        FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
         
     | 
| 37 | 
         
            +
             
     | 
| 38 | 
         
            +
             
     | 
| 39 | 
         
            +
            @dataclass
         
     | 
| 40 | 
         
            +
            class zero_model_state:
         
     | 
| 41 | 
         
            +
                buffers: dict()
         
     | 
| 42 | 
         
            +
                param_shapes: dict()
         
     | 
| 43 | 
         
            +
                shared_params: list
         
     | 
| 44 | 
         
            +
                ds_version: int
         
     | 
| 45 | 
         
            +
                frozen_param_shapes: dict()
         
     | 
| 46 | 
         
            +
                frozen_param_fragments: dict()
         
     | 
| 47 | 
         
            +
             
     | 
| 48 | 
         
            +
             
     | 
| 49 | 
         
            +
            debug = 0
         
     | 
| 50 | 
         
            +
             
     | 
| 51 | 
         
            +
            # load to cpu
         
     | 
| 52 | 
         
            +
            device = torch.device('cpu')
         
     | 
| 53 | 
         
            +
             
     | 
| 54 | 
         
            +
             
     | 
| 55 | 
         
            +
            def atoi(text):
         
     | 
| 56 | 
         
            +
                return int(text) if text.isdigit() else text
         
     | 
| 57 | 
         
            +
             
     | 
| 58 | 
         
            +
             
     | 
| 59 | 
         
            +
            def natural_keys(text):
         
     | 
| 60 | 
         
            +
                '''
         
     | 
| 61 | 
         
            +
                alist.sort(key=natural_keys) sorts in human order
         
     | 
| 62 | 
         
            +
                http://nedbatchelder.com/blog/200712/human_sorting.html
         
     | 
| 63 | 
         
            +
                (See Toothy's implementation in the comments)
         
     | 
| 64 | 
         
            +
                '''
         
     | 
| 65 | 
         
            +
                return [atoi(c) for c in re.split(r'(\d+)', text)]
         
     | 
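             # e.g. sorted(["rank_10", "rank_2", "rank_1"], key=natural_keys)
             #      -> ["rank_1", "rank_2", "rank_10"]   (numeric rather than lexicographic order)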
| 66 | 
         
            +
             
     | 
| 67 | 
         
            +
             
     | 
| 68 | 
         
            +
            def get_model_state_file(checkpoint_dir, zero_stage):
         
     | 
| 69 | 
         
            +
                if not os.path.isdir(checkpoint_dir):
         
     | 
| 70 | 
         
            +
                    raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
         
     | 
| 71 | 
         
            +
             
     | 
| 72 | 
         
            +
                # there should be only one file
         
     | 
| 73 | 
         
            +
                if zero_stage <= 2:
         
     | 
| 74 | 
         
            +
                    file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
         
     | 
| 75 | 
         
            +
                elif zero_stage == 3:
         
     | 
| 76 | 
         
            +
                    file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
         
     | 
| 77 | 
         
            +
             
     | 
| 78 | 
         
            +
                if not os.path.exists(file):
         
     | 
| 79 | 
         
            +
                    raise FileNotFoundError(f"can't find model states file at '{file}'")
         
     | 
| 80 | 
         
            +
             
     | 
| 81 | 
         
            +
                return file
         
     | 
| 82 | 
         
            +
             
     | 
| 83 | 
         
            +
             
     | 
| 84 | 
         
            +
            def get_checkpoint_files(checkpoint_dir, glob_pattern):
         
     | 
| 85 | 
         
            +
                # XXX: need to test that this simple glob rule works for multi-node setup too
         
     | 
| 86 | 
         
            +
                ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
         
     | 
| 87 | 
         
            +
             
     | 
| 88 | 
         
            +
                if len(ckpt_files) == 0:
         
     | 
| 89 | 
         
            +
                    raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
         
     | 
| 90 | 
         
            +
             
     | 
| 91 | 
         
            +
                return ckpt_files
         
     | 
| 92 | 
         
            +
             
     | 
| 93 | 
         
            +
             
     | 
| 94 | 
         
            +
            def get_optim_files(checkpoint_dir):
         
     | 
| 95 | 
         
            +
                return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
         
     | 
| 96 | 
         
            +
             
     | 
| 97 | 
         
            +
             
     | 
| 98 | 
         
            +
            def get_model_state_files(checkpoint_dir):
         
     | 
| 99 | 
         
            +
                return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
         
     | 
| 100 | 
         
            +
             
     | 
| 101 | 
         
            +
             
     | 
| 102 | 
         
            +
            def parse_model_states(files):
         
     | 
| 103 | 
         
            +
                zero_model_states = []
         
     | 
| 104 | 
         
            +
                for file in files:
         
     | 
| 105 | 
         
            +
                    state_dict = torch.load(file, map_location=device, weights_only=False)
         
     | 
| 106 | 
         
            +
             
     | 
| 107 | 
         
            +
                    if BUFFER_NAMES not in state_dict:
         
     | 
| 108 | 
         
            +
                        raise ValueError(f"{file} is not a model state checkpoint")
         
     | 
| 109 | 
         
            +
                    buffer_names = state_dict[BUFFER_NAMES]
         
     | 
| 110 | 
         
            +
                    if debug:
         
     | 
| 111 | 
         
            +
                        print("Found buffers:", buffer_names)
         
     | 
| 112 | 
         
            +
             
     | 
| 113 | 
         
            +
                    # recover just the buffers while restoring them to fp32 if they were saved in fp16
         
     | 
| 114 | 
         
            +
                    buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
         
     | 
| 115 | 
         
            +
                    param_shapes = state_dict[PARAM_SHAPES]
         
     | 
| 116 | 
         
            +
             
     | 
| 117 | 
         
            +
                    # collect parameters that are included in param_shapes
         
     | 
| 118 | 
         
            +
                    param_names = []
         
     | 
| 119 | 
         
            +
                    for s in param_shapes:
         
     | 
| 120 | 
         
            +
                        for name in s.keys():
         
     | 
| 121 | 
         
            +
                            param_names.append(name)
         
     | 
| 122 | 
         
            +
             
     | 
| 123 | 
         
            +
                    # update with frozen parameters
         
     | 
| 124 | 
         
            +
                    frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
         
     | 
| 125 | 
         
            +
                    if frozen_param_shapes is not None:
         
     | 
| 126 | 
         
            +
                        if debug:
         
     | 
| 127 | 
         
            +
                            print(f"Found frozen_param_shapes: {frozen_param_shapes}")
         
     | 
| 128 | 
         
            +
                        param_names += list(frozen_param_shapes.keys())
         
     | 
| 129 | 
         
            +
             
     | 
| 130 | 
         
            +
                    # handle shared params
         
     | 
| 131 | 
         
            +
                    shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
         
     | 
| 132 | 
         
            +
             
     | 
| 133 | 
         
            +
                    ds_version = state_dict.get(DS_VERSION, None)
         
     | 
| 134 | 
         
            +
             
     | 
| 135 | 
         
            +
                    frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
         
     | 
| 136 | 
         
            +
             
     | 
| 137 | 
         
            +
                    z_model_state = zero_model_state(buffers=buffers,
         
     | 
| 138 | 
         
            +
                                                     param_shapes=param_shapes,
         
     | 
| 139 | 
         
            +
                                                     shared_params=shared_params,
         
     | 
| 140 | 
         
            +
                                                     ds_version=ds_version,
         
     | 
| 141 | 
         
            +
                                                     frozen_param_shapes=frozen_param_shapes,
         
     | 
| 142 | 
         
            +
                                                     frozen_param_fragments=frozen_param_fragments)
         
     | 
| 143 | 
         
            +
                    zero_model_states.append(z_model_state)
         
     | 
| 144 | 
         
            +
             
     | 
| 145 | 
         
            +
                return zero_model_states
         
     | 
| 146 | 
         
            +
             
     | 
| 147 | 
         
            +
             
     | 
| 148 | 
         
            +
            def parse_optim_states(files, ds_checkpoint_dir):
         
     | 
| 149 | 
         
            +
                total_files = len(files)
         
     | 
| 150 | 
         
            +
                state_dicts = []
         
     | 
| 151 | 
         
            +
                for f in tqdm(files, desc='Loading checkpoint shards'):
         
     | 
| 152 | 
         
            +
                    state_dict = torch.load(f, map_location=device, mmap=True, weights_only=False)
         
     | 
| 153 | 
         
            +
                     # immediately discard the two potentially huge optimizer states, as we only care about the fp32 master weights
         
     | 
| 154 | 
         
            +
                    # and also handle the case where it was already removed by another helper script
         
     | 
| 155 | 
         
            +
                    state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
         
     | 
| 156 | 
         
            +
                    state_dicts.append(state_dict)
         
     | 
| 157 | 
         
            +
             
     | 
| 158 | 
         
            +
                 if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]:
         
     | 
| 159 | 
         
            +
                    raise ValueError(f"{files[0]} is not a zero checkpoint")
         
     | 
| 160 | 
         
            +
                zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
         
     | 
| 161 | 
         
            +
                world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
         
     | 
| 162 | 
         
            +
             
     | 
| 163 | 
         
            +
                # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
         
     | 
| 164 | 
         
            +
                # parameters can be different from data parallelism for non-expert parameters. So we can just
         
     | 
| 165 | 
         
            +
                # use the max of the partition_count to get the dp world_size.
         
     | 
| 166 | 
         
            +
             
     | 
| 167 | 
         
            +
                if type(world_size) is list:
         
     | 
| 168 | 
         
            +
                    world_size = max(world_size)
         
     | 
| 169 | 
         
            +
             
     | 
| 170 | 
         
            +
                if world_size != total_files:
         
     | 
| 171 | 
         
            +
                    raise ValueError(
         
     | 
| 172 | 
         
            +
                        f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
         
     | 
| 173 | 
         
            +
                        "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
         
     | 
| 174 | 
         
            +
                    )
         
     | 
| 175 | 
         
            +
             
     | 
| 176 | 
         
            +
                # the groups are named differently in each stage
         
     | 
| 177 | 
         
            +
                if zero_stage <= 2:
         
     | 
| 178 | 
         
            +
                    fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
         
     | 
| 179 | 
         
            +
                elif zero_stage == 3:
         
     | 
| 180 | 
         
            +
                    fp32_groups_key = FP32_FLAT_GROUPS
         
     | 
| 181 | 
         
            +
                else:
         
     | 
| 182 | 
         
            +
                    raise ValueError(f"unknown zero stage {zero_stage}")
         
     | 
| 183 | 
         
            +
             
     | 
| 184 | 
         
            +
                fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
         
     | 
| 185 | 
         
            +
                return zero_stage, world_size, fp32_flat_groups
         
     | 
| 186 | 
         
            +
             
     | 
| 187 | 
         
            +
             
     | 
| 188 | 
         
            +
            def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
         
     | 
| 189 | 
         
            +
                """
         
     | 
| 190 | 
         
            +
                Returns fp32 state_dict reconstructed from ds checkpoint
         
     | 
| 191 | 
         
            +
             
     | 
| 192 | 
         
            +
                Args:
         
     | 
| 193 | 
         
            +
                     - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
                     - ``exclude_frozen_parameters``: if True, frozen parameters are left out of the reconstructed state_dict
         
     | 
| 194 | 
         
            +
             
     | 
| 195 | 
         
            +
                """
         
     | 
| 196 | 
         
            +
                print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
         
     | 
| 197 | 
         
            +
             
     | 
| 198 | 
         
            +
                optim_files = get_optim_files(ds_checkpoint_dir)
         
     | 
| 199 | 
         
            +
                zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
         
     | 
| 200 | 
         
            +
                print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
         
     | 
| 201 | 
         
            +
             
     | 
| 202 | 
         
            +
                model_files = get_model_state_files(ds_checkpoint_dir)
         
     | 
| 203 | 
         
            +
             
     | 
| 204 | 
         
            +
                zero_model_states = parse_model_states(model_files)
         
     | 
| 205 | 
         
            +
                print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
         
     | 
| 206 | 
         
            +
             
     | 
| 207 | 
         
            +
                if zero_stage <= 2:
         
     | 
| 208 | 
         
            +
                    return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
         
     | 
| 209 | 
         
            +
                                                                      exclude_frozen_parameters)
         
     | 
| 210 | 
         
            +
                elif zero_stage == 3:
         
     | 
| 211 | 
         
            +
                    return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
         
     | 
| 212 | 
         
            +
                                                                      exclude_frozen_parameters)
         
     | 
| 213 | 
         
            +
             
     | 
| 214 | 
         
            +
             
     | 
| 215 | 
         
            +
            def _zero2_merge_frozen_params(state_dict, zero_model_states):
         
     | 
| 216 | 
         
            +
                if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
         
     | 
| 217 | 
         
            +
                    return
         
     | 
| 218 | 
         
            +
             
     | 
| 219 | 
         
            +
                frozen_param_shapes = zero_model_states[0].frozen_param_shapes
         
     | 
| 220 | 
         
            +
                frozen_param_fragments = zero_model_states[0].frozen_param_fragments
         
     | 
| 221 | 
         
            +
             
     | 
| 222 | 
         
            +
                if debug:
         
     | 
| 223 | 
         
            +
                    num_elem = sum(s.numel() for s in frozen_param_shapes.values())
         
     | 
| 224 | 
         
            +
                    print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
         
     | 
| 225 | 
         
            +
             
     | 
| 226 | 
         
            +
                    wanted_params = len(frozen_param_shapes)
         
     | 
| 227 | 
         
            +
                    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
         
     | 
| 228 | 
         
            +
                    avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
         
     | 
| 229 | 
         
            +
                    print(f'Frozen params: Have {avail_numel} numels to process.')
         
     | 
| 230 | 
         
            +
                    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
         
     | 
| 231 | 
         
            +
             
     | 
| 232 | 
         
            +
                total_params = 0
         
     | 
| 233 | 
         
            +
                total_numel = 0
         
     | 
| 234 | 
         
            +
                for name, shape in frozen_param_shapes.items():
         
     | 
| 235 | 
         
            +
                    total_params += 1
         
     | 
| 236 | 
         
            +
                    unpartitioned_numel = shape.numel()
         
     | 
| 237 | 
         
            +
                    total_numel += unpartitioned_numel
         
     | 
| 238 | 
         
            +
             
     | 
| 239 | 
         
            +
                    state_dict[name] = frozen_param_fragments[name]
         
     | 
| 240 | 
         
            +
             
     | 
| 241 | 
         
            +
                    if debug:
         
     | 
| 242 | 
         
            +
                        print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
         
     | 
| 243 | 
         
            +
             
     | 
| 244 | 
         
            +
                print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
         
     | 
| 245 | 
         
            +
             
     | 
| 246 | 
         
            +
             
     | 
| 247 | 
         
            +
            def _has_callable(obj, fn):
         
     | 
| 248 | 
         
            +
                attr = getattr(obj, fn, None)
         
     | 
| 249 | 
         
            +
                return callable(attr)
         
     | 
| 250 | 
         
            +
             
     | 
| 251 | 
         
            +
             
     | 
| 252 | 
         
            +
            def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
         
     | 
| 253 | 
         
            +
                param_shapes = zero_model_states[0].param_shapes
         
     | 
| 254 | 
         
            +
             
     | 
| 255 | 
         
            +
                # Reconstruction protocol:
         
     | 
| 256 | 
         
            +
                #
         
     | 
| 257 | 
         
            +
                # XXX: document this
         
     | 
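                 # Roughly (inferred from the code below): each rank saved one flat fp32 partition per
                 # param group; concatenating the partitions from all ranks rebuilds the full flat
                 # vector for that group, and each parameter is then carved out of it in order with
                 # narrow()/view(), allowing for zero-2's 2*world_size alignment padding at the end.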
| 258 | 
         
            +
             
     | 
| 259 | 
         
            +
                if debug:
         
     | 
| 260 | 
         
            +
                    for i in range(world_size):
         
     | 
| 261 | 
         
            +
                        for j in range(len(fp32_flat_groups[0])):
         
     | 
| 262 | 
         
            +
                            print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
         
     | 
| 263 | 
         
            +
             
     | 
| 264 | 
         
            +
                # XXX: memory usage doubles here (zero2)
         
     | 
| 265 | 
         
            +
                num_param_groups = len(fp32_flat_groups[0])
         
     | 
| 266 | 
         
            +
                merged_single_partition_of_fp32_groups = []
         
     | 
| 267 | 
         
            +
                for i in range(num_param_groups):
         
     | 
| 268 | 
         
            +
                    merged_partitions = [sd[i] for sd in fp32_flat_groups]
         
     | 
| 269 | 
         
            +
                    full_single_fp32_vector = torch.cat(merged_partitions, 0)
         
     | 
| 270 | 
         
            +
                    merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
         
     | 
| 271 | 
         
            +
                avail_numel = sum(
         
     | 
| 272 | 
         
            +
                    [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
         
     | 
| 273 | 
         
            +
             
     | 
| 274 | 
         
            +
                if debug:
         
     | 
| 275 | 
         
            +
                    wanted_params = sum([len(shapes) for shapes in param_shapes])
         
     | 
| 276 | 
         
            +
                    wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
         
     | 
| 277 | 
         
            +
                    # not asserting if there is a mismatch due to possible padding
         
     | 
| 278 | 
         
            +
                    print(f"Have {avail_numel} numels to process.")
         
     | 
| 279 | 
         
            +
                    print(f"Need {wanted_numel} numels in {wanted_params} params.")
         
     | 
| 280 | 
         
            +
             
     | 
| 281 | 
         
            +
                # params
         
     | 
| 282 | 
         
            +
                # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
         
     | 
| 283 | 
         
            +
                # out-of-core computing solution
         
     | 
| 284 | 
         
            +
                total_numel = 0
         
     | 
| 285 | 
         
            +
                total_params = 0
         
     | 
| 286 | 
         
            +
                for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
         
     | 
| 287 | 
         
            +
                    offset = 0
         
     | 
| 288 | 
         
            +
                    avail_numel = full_single_fp32_vector.numel()
         
     | 
| 289 | 
         
            +
                    for name, shape in shapes.items():
         
     | 
| 290 | 
         
            +
             
     | 
| 291 | 
         
            +
                        unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
         
     | 
| 292 | 
         
            +
                        total_numel += unpartitioned_numel
         
     | 
| 293 | 
         
            +
                        total_params += 1
         
     | 
| 294 | 
         
            +
             
     | 
| 295 | 
         
            +
                        if debug:
         
     | 
| 296 | 
         
            +
                            print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
         
     | 
| 297 | 
         
            +
                        state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
         
     | 
| 298 | 
         
            +
                        offset += unpartitioned_numel
         
     | 
| 299 | 
         
            +
             
     | 
| 300 | 
         
            +
                    # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
         
     | 
| 301 | 
         
            +
                    # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
         
     | 
| 302 | 
         
            +
                    # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
         
     | 
| 303 | 
         
            +
                    # live optimizer object, so we are checking that the numbers are within the right range
         
     | 
| 304 | 
         
            +
                    align_to = 2 * world_size
         
     | 
| 305 | 
         
            +
             
     | 
| 306 | 
         
            +
                    def zero2_align(x):
         
     | 
| 307 | 
         
            +
                        return align_to * math.ceil(x / align_to)
         
     | 
| 308 | 
         
            +
             
     | 
| 309 | 
         
            +
                    if debug:
         
     | 
| 310 | 
         
            +
                        print(f"original offset={offset}, avail_numel={avail_numel}")
         
     | 
| 311 | 
         
            +
             
     | 
| 312 | 
         
            +
                    offset = zero2_align(offset)
         
     | 
| 313 | 
         
            +
                    avail_numel = zero2_align(avail_numel)
         
     | 
| 314 | 
         
            +
             
     | 
| 315 | 
         
            +
                    if debug:
         
     | 
| 316 | 
         
            +
                        print(f"aligned  offset={offset}, avail_numel={avail_numel}")
         
     | 
| 317 | 
         
            +
             
     | 
| 318 | 
         
            +
                    # Sanity check
         
     | 
| 319 | 
         
            +
                    if offset != avail_numel:
         
     | 
| 320 | 
         
            +
                        raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
         
     | 
| 321 | 
         
            +
             
     | 
| 322 | 
         
            +
                print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
         
     | 
| 323 | 
         
            +
             
     | 
| 324 | 
         
            +
             
     | 
| 325 | 
         
            +
            def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
         
     | 
| 326 | 
         
            +
                                                           exclude_frozen_parameters):
         
     | 
| 327 | 
         
            +
                state_dict = OrderedDict()
         
     | 
| 328 | 
         
            +
             
     | 
| 329 | 
         
            +
                # buffers
         
     | 
| 330 | 
         
            +
                buffers = zero_model_states[0].buffers
         
     | 
| 331 | 
         
            +
                state_dict.update(buffers)
         
     | 
| 332 | 
         
            +
                if debug:
         
     | 
| 333 | 
         
            +
                    print(f"added {len(buffers)} buffers")
         
     | 
| 334 | 
         
            +
             
     | 
| 335 | 
         
            +
                if not exclude_frozen_parameters:
         
     | 
| 336 | 
         
            +
                    _zero2_merge_frozen_params(state_dict, zero_model_states)
         
     | 
| 337 | 
         
            +
             
     | 
| 338 | 
         
            +
                _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
         
     | 
| 339 | 
         
            +
             
     | 
| 340 | 
         
            +
                # recover shared parameters
         
     | 
| 341 | 
         
            +
                for pair in zero_model_states[0].shared_params:
         
     | 
| 342 | 
         
            +
                    if pair[1] in state_dict:
         
     | 
| 343 | 
         
            +
                        state_dict[pair[0]] = state_dict[pair[1]]
         
     | 
| 344 | 
         
            +
             
     | 
| 345 | 
         
            +
                return state_dict
         
     | 
| 346 | 
         
            +
             
     | 
| 347 | 
         
            +
             
     | 
| 348 | 
         
            +
            def zero3_partitioned_param_info(unpartitioned_numel, world_size):
         
     | 
| 349 | 
         
            +
                remainder = unpartitioned_numel % world_size
         
     | 
| 350 | 
         
            +
                padding_numel = (world_size - remainder) if remainder else 0
         
     | 
| 351 | 
         
            +
                partitioned_numel = math.ceil(unpartitioned_numel / world_size)
         
     | 
| 352 | 
         
            +
                return partitioned_numel, padding_numel
         
     | 
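             # e.g. unpartitioned_numel=10, world_size=4 -> partitioned_numel=3, padding_numel=2
             #      (each rank stores 3 elements: 4 * 3 = 10 real + 2 padding)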
| 353 | 
         
            +
             
     | 
| 354 | 
         
            +
             
     | 
| 355 | 
         
            +
            def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
         
     | 
| 356 | 
         
            +
                if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
         
     | 
| 357 | 
         
            +
                    return
         
     | 
| 358 | 
         
            +
             
     | 
| 359 | 
         
            +
                if debug:
         
     | 
| 360 | 
         
            +
                    for i in range(world_size):
         
     | 
| 361 | 
         
            +
                        num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
         
     | 
| 362 | 
         
            +
                        print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
         
     | 
| 363 | 
         
            +
             
     | 
| 364 | 
         
            +
                    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
         
     | 
| 365 | 
         
            +
                    wanted_params = len(frozen_param_shapes)
         
     | 
| 366 | 
         
            +
                    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
         
     | 
| 367 | 
         
            +
                    avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
         
     | 
| 368 | 
         
            +
                    print(f'Frozen params: Have {avail_numel} numels to process.')
         
     | 
| 369 | 
         
            +
                    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
         
     | 
| 370 | 
         
            +
             
     | 
| 371 | 
         
            +
                total_params = 0
         
     | 
| 372 | 
         
            +
                total_numel = 0
         
     | 
| 373 | 
         
            +
                for name, shape in zero_model_states[0].frozen_param_shapes.items():
         
     | 
| 374 | 
         
            +
                    total_params += 1
         
     | 
| 375 | 
         
            +
                    unpartitioned_numel = shape.numel()
         
     | 
| 376 | 
         
            +
                    total_numel += unpartitioned_numel
         
     | 
| 377 | 
         
            +
             
     | 
| 378 | 
         
            +
                    param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
         
     | 
| 379 | 
         
            +
                    state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
         
     | 
| 380 | 
         
            +
             
     | 
| 381 | 
         
            +
                    partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
         
     | 
| 382 | 
         
            +
             
     | 
| 383 | 
         
            +
                    if debug:
         
     | 
| 384 | 
         
            +
                        print(
         
     | 
| 385 | 
         
            +
                            f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
         
     | 
| 386 | 
         
            +
                        )
         
     | 
| 387 | 
         
            +
             
     | 
| 388 | 
         
            +
                print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
         
     | 
| 389 | 
         
            +
             
     | 
| 390 | 
         
            +
             
     | 
| 391 | 
         
            +
            class GatheredTensor:
         
     | 
| 392 | 
         
            +
                """
         
     | 
| 393 | 
         
            +
                A pseudo tensor that collects partitioned weights.
         
     | 
| 394 | 
         
            +
                It is more memory efficient when there are multiple groups.
         
     | 
| 395 | 
         
            +
                """
         
     | 
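                 # Note: a GatheredTensor is lazy; it only records offsets into the flat groups.
                 # Call .contiguous() to materialize the real torch.Tensor (see to_torch_tensor() below).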
| 396 | 
         
            +
             
     | 
| 397 | 
         
            +
                def __init__(self, flat_groups, flat_groups_offset, offset, partitioned_numel, shape):
         
     | 
| 398 | 
         
            +
                    self.flat_groups = flat_groups
         
     | 
| 399 | 
         
            +
                    self.flat_groups_offset = flat_groups_offset
         
     | 
| 400 | 
         
            +
                    self.offset = offset
         
     | 
| 401 | 
         
            +
                    self.partitioned_numel = partitioned_numel
         
     | 
| 402 | 
         
            +
                    self.shape = shape
         
     | 
| 403 | 
         
            +
                    self.dtype = self.flat_groups[0][0].dtype
         
     | 
| 404 | 
         
            +
             
     | 
| 405 | 
         
            +
                def contiguous(self):
         
     | 
| 406 | 
         
            +
                    """
         
     | 
| 407 | 
         
            +
                    Merge partitioned weights from flat_groups into a single tensor.
         
     | 
| 408 | 
         
            +
                    """
         
     | 
| 409 | 
         
            +
                    end_idx = self.offset + self.partitioned_numel
         
     | 
| 410 | 
         
            +
                    world_size = len(self.flat_groups)
         
     | 
| 411 | 
         
            +
                    pad_flat_param_chunks = []
         
     | 
| 412 | 
         
            +
             
     | 
| 413 | 
         
            +
                    for rank_i in range(world_size):
         
     | 
| 414 | 
         
            +
                        # for each rank, we need to collect weights from related group/groups
         
     | 
| 415 | 
         
            +
                        flat_groups_at_rank_i = self.flat_groups[rank_i]
         
     | 
| 416 | 
         
            +
                        start_group_id = None
         
     | 
| 417 | 
         
            +
                        end_group_id = None
         
     | 
| 418 | 
         
            +
                        for group_id in range(len(self.flat_groups_offset)):
         
     | 
| 419 | 
         
            +
                            if self.flat_groups_offset[group_id] <= self.offset < self.flat_groups_offset[group_id + 1]:
         
     | 
| 420 | 
         
            +
                                start_group_id = group_id
         
     | 
| 421 | 
         
            +
                            if self.flat_groups_offset[group_id] < end_idx <= self.flat_groups_offset[group_id + 1]:
         
     | 
| 422 | 
         
            +
                                end_group_id = group_id
         
     | 
| 423 | 
         
            +
                                break
         
     | 
| 424 | 
         
            +
                        # collect weights from related group/groups
         
     | 
| 425 | 
         
            +
                        for group_id in range(start_group_id, end_group_id + 1):
         
     | 
| 426 | 
         
            +
                            flat_tensor = flat_groups_at_rank_i[group_id]
         
     | 
| 427 | 
         
            +
                            start_offset = self.offset - self.flat_groups_offset[group_id]
         
     | 
| 428 | 
         
            +
                            end_offset = min(end_idx, self.flat_groups_offset[group_id + 1]) - self.flat_groups_offset[group_id]
         
     | 
| 429 | 
         
            +
                            pad_flat_param_chunks.append(flat_tensor[start_offset:end_offset])
         
     | 
| 430 | 
         
            +
             
     | 
| 431 | 
         
            +
                    # collect weights from all ranks
         
     | 
| 432 | 
         
            +
                    pad_flat_param = torch.cat(pad_flat_param_chunks, dim=0)
         
     | 
| 433 | 
         
            +
                    param = pad_flat_param[:self.shape.numel()].view(self.shape).contiguous()
         
     | 
| 434 | 
         
            +
                    return param
         
     | 
| 435 | 
         
            +
             
     | 
| 436 | 
         
            +
             
     | 
| 437 | 
         
            +
            def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
         
     | 
| 438 | 
         
            +
                param_shapes = zero_model_states[0].param_shapes
         
     | 
| 439 | 
         
            +
                avail_numel = sum([flat_group.numel() for flat_group in fp32_flat_groups[0]]) * world_size
         
     | 
| 440 | 
         
            +
             
     | 
| 441 | 
         
            +
                # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
         
     | 
| 442 | 
         
            +
                # param, re-consolidating each param, while dealing with padding if any
         
     | 
| 443 | 
         
            +
             
     | 
| 444 | 
         
            +
                # merge list of dicts, preserving order
         
     | 
| 445 | 
         
            +
                param_shapes = {k: v for d in param_shapes for k, v in d.items()}
         
     | 
| 446 | 
         
            +
             
     | 
| 447 | 
         
            +
                if debug:
         
     | 
| 448 | 
         
            +
                    for i in range(world_size):
         
     | 
| 449 | 
         
            +
                        print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
         
     | 
| 450 | 
         
            +
             
     | 
| 451 | 
         
            +
                    wanted_params = len(param_shapes)
         
     | 
| 452 | 
         
            +
                    wanted_numel = sum(shape.numel() for shape in param_shapes.values())
         
     | 
| 453 | 
         
            +
                    # not asserting if there is a mismatch due to possible padding
         
     | 
| 454 | 
         
            +
                    avail_numel = fp32_flat_groups[0].numel() * world_size
         
     | 
| 455 | 
         
            +
                    print(f"Trainable params: Have {avail_numel} numels to process.")
         
     | 
| 456 | 
         
            +
                    print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
         
     | 
| 457 | 
         
            +
             
     | 
| 458 | 
         
            +
                # params
         
     | 
| 459 | 
         
            +
                # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
         
     | 
| 460 | 
         
            +
                # out-of-core computing solution
         
     | 
| 461 | 
         
            +
                offset = 0
         
     | 
| 462 | 
         
            +
                total_numel = 0
         
     | 
| 463 | 
         
            +
                total_params = 0
         
     | 
| 464 | 
         
            +
                flat_groups_offset = [0] + list(np.cumsum([flat_tensor.numel() for flat_tensor in fp32_flat_groups[0]]))
         
     | 
| 465 | 
         
            +
                for name, shape in tqdm(param_shapes.items(), desc='Gathering sharded weights'):
         
     | 
| 466 | 
         
            +
                    unpartitioned_numel = shape.numel()
         
     | 
| 467 | 
         
            +
                    total_numel += unpartitioned_numel
         
     | 
| 468 | 
         
            +
                    total_params += 1
         
     | 
| 469 | 
         
            +
                    partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
         
     | 
| 470 | 
         
            +
             
     | 
| 471 | 
         
            +
                    if debug:
         
     | 
| 472 | 
         
            +
                        print(
         
     | 
| 473 | 
         
            +
                            f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
         
     | 
| 474 | 
         
            +
                        )
         
     | 
| 475 | 
         
            +
             
     | 
| 476 | 
         
            +
                    # memory efficient tensor
         
     | 
| 477 | 
         
            +
                    tensor = GatheredTensor(fp32_flat_groups, flat_groups_offset, offset, partitioned_numel, shape)
         
     | 
| 478 | 
         
            +
                    state_dict[name] = tensor
         
     | 
| 479 | 
         
            +
                    offset += partitioned_numel
         
     | 
| 480 | 
         
            +
             
     | 
| 481 | 
         
            +
                offset *= world_size
         
     | 
| 482 | 
         
            +
             
     | 
| 483 | 
         
            +
                # Sanity check
         
     | 
| 484 | 
         
            +
                if offset != avail_numel:
         
     | 
| 485 | 
         
            +
                    raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
         
     | 
| 486 | 
         
            +
             
     | 
| 487 | 
         
            +
                print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
         
     | 
| 488 | 
         
            +
             
     | 
| 489 | 
         
            +
             
     | 
| 490 | 
         
            +
            def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
         
     | 
| 491 | 
         
            +
                                                           exclude_frozen_parameters):
         
     | 
| 492 | 
         
            +
                state_dict = OrderedDict()
         
     | 
| 493 | 
         
            +
             
     | 
| 494 | 
         
            +
                # buffers
         
     | 
| 495 | 
         
            +
                buffers = zero_model_states[0].buffers
         
     | 
| 496 | 
         
            +
                state_dict.update(buffers)
         
     | 
| 497 | 
         
            +
                if debug:
         
     | 
| 498 | 
         
            +
                    print(f"added {len(buffers)} buffers")
         
     | 
| 499 | 
         
            +
             
     | 
| 500 | 
         
            +
                if not exclude_frozen_parameters:
         
     | 
| 501 | 
         
            +
                    _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
         
     | 
| 502 | 
         
            +
             
     | 
| 503 | 
         
            +
                _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
         
     | 
| 504 | 
         
            +
             
     | 
| 505 | 
         
            +
                # recover shared parameters
         
     | 
| 506 | 
         
            +
                for pair in zero_model_states[0].shared_params:
         
     | 
| 507 | 
         
            +
                    if pair[1] in state_dict:
         
     | 
| 508 | 
         
            +
                        state_dict[pair[0]] = state_dict[pair[1]]
         
     | 
| 509 | 
         
            +
             
     | 
| 510 | 
         
            +
                return state_dict
         
     | 
| 511 | 
         
            +
             
     | 
| 512 | 
         
            +
             
     | 
| 513 | 
         
            +
            def to_torch_tensor(state_dict, return_empty_tensor=False):
         
     | 
| 514 | 
         
            +
                """
         
     | 
| 515 | 
         
            +
                Convert state_dict of GatheredTensor to torch tensor
         
     | 
| 516 | 
         
            +
                """
         
     | 
| 517 | 
         
            +
                torch_state_dict = {}
         
     | 
| 518 | 
         
            +
                converted_tensors = {}
         
     | 
| 519 | 
         
            +
                for name, tensor in state_dict.items():
         
     | 
| 520 | 
         
            +
                    tensor_id = id(tensor)
         
     | 
| 521 | 
         
            +
                    if tensor_id in converted_tensors:  # shared tensors
         
     | 
| 522 | 
         
            +
                        shared_tensor = torch_state_dict[converted_tensors[tensor_id]]
         
     | 
| 523 | 
         
            +
                        torch_state_dict[name] = shared_tensor
         
     | 
| 524 | 
         
            +
                    else:
         
     | 
| 525 | 
         
            +
                        converted_tensors[tensor_id] = name
         
     | 
| 526 | 
         
            +
                        if return_empty_tensor:
         
     | 
| 527 | 
         
            +
                            torch_state_dict[name] = torch.empty(tensor.shape, dtype=tensor.dtype)
         
     | 
| 528 | 
         
            +
                        else:
         
     | 
| 529 | 
         
            +
                            torch_state_dict[name] = tensor.contiguous()
         
     | 
| 530 | 
         
            +
                return torch_state_dict
         
     | 
| 531 | 
         
            +
             
     | 
| 532 | 
         
            +
             
     | 
| 533 | 
         
            +
def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
                                             tag=None,
                                             exclude_frozen_parameters=False,
                                             lazy_mode=False):
    """
    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
    ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
    via a model hub.

    Args:
        - ``checkpoint_dir``: path to the desired checkpoint folder
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
        - ``exclude_frozen_parameters``: exclude frozen parameters
        - ``lazy_mode``: get state_dict in lazy mode. It returns a dict of pseudo tensors instead of torch tensors, which is more memory efficient.
          Convert a pseudo tensor to a torch tensor by calling ``.contiguous()``

    Returns:
        - pytorch ``state_dict``

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
        # do the training and checkpoint saving
        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
        model = model.cpu() # move to cpu
        model.load_state_dict(state_dict)
        # submit to model hub or save the model to share with others

    In this example the ``model`` will no longer be usable in the deepspeed context of the same
    application. i.e. you will need to re-initialize the deepspeed engine, since
    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.

    If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.

    Note: the above usage may not work if your application doesn't have sufficient free CPU memory.
    You may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
    the checkpoint. Or you can load the state_dict in lazy mode ::

        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, lazy_mode=True) # not on cpu
        for name, lazy_tensor in state_dict.items():
            tensor = lazy_tensor.contiguous()  # to cpu
            print(name, tensor)
            # del tensor to release memory if it is no longer in use
    """
    if tag is None:
        latest_path = os.path.join(checkpoint_dir, 'latest')
        if os.path.isfile(latest_path):
            with open(latest_path, 'r') as fd:
                tag = fd.read().strip()
        else:
            raise ValueError(f"Unable to find 'latest' file at {latest_path}")

    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)

    if not os.path.isdir(ds_checkpoint_dir):
        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")

    state_dict = _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
    if lazy_mode:
        return state_dict
    else:
        return to_torch_tensor(state_dict)


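# A minimal illustrative sketch (not part of the upstream DeepSpeed script): with
# lazy_mode=True the consolidated weights can be streamed one tensor at a time, e.g.
# to count parameters, without holding the full fp32 state_dict in CPU memory. The
# default checkpoint path below is hypothetical.
def _example_count_parameters(checkpoint_dir="output/checkpoint-100"):
    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, lazy_mode=True)
    total = 0
    for name, lazy_tensor in state_dict.items():
        tensor = lazy_tensor.contiguous()  # materialize only this tensor on cpu
        total += tensor.numel()
        del tensor  # release the memory before moving on to the next tensor
    return total

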
def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir,
                                               output_dir,
                                               max_shard_size="5GB",
                                               safe_serialization=False,
                                               tag=None,
                                               exclude_frozen_parameters=False):
    """
    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
    loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.

    Args:
        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
        - ``output_dir``: directory to the pytorch fp32 state_dict output files
        - ``max_shard_size``: the maximum size for a checkpoint before being sharded, default value is 5GB
        - ``safe_serialization``: whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
        - ``exclude_frozen_parameters``: exclude frozen parameters
    """

    # Dependency pre-check
    if safe_serialization:
        try:
            from safetensors.torch import save_file
        except ImportError:
            print('If you want to use `safe_serialization`, please `pip install safetensors`')
            raise
    if max_shard_size is not None:
        try:
            from huggingface_hub import split_torch_state_dict_into_shards
        except ImportError:
            print('If you want to use `max_shard_size`, please `pip install huggingface_hub`')
            raise

    # Convert zero checkpoint to state_dict
    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
                                                          tag,
                                                          exclude_frozen_parameters,
                                                          lazy_mode=True)

    # Shard the model if it is too big.
    weights_name = "model.safetensors" if safe_serialization else "pytorch_model.bin"
    if max_shard_size is not None:
        filename_pattern = weights_name.replace(".bin", "{suffix}.bin").replace(".safetensors", "{suffix}.safetensors")
        # a memory-efficient approach for sharding
        empty_state_dict = to_torch_tensor(state_dict, return_empty_tensor=True)
        state_dict_split = split_torch_state_dict_into_shards(empty_state_dict,
                                                              filename_pattern=filename_pattern,
                                                              max_shard_size=max_shard_size)
    else:
        from collections import namedtuple
        StateDictSplit = namedtuple("StateDictSplit", ["is_sharded", "filename_to_tensors"])
        state_dict_split = StateDictSplit(is_sharded=False,
                                          filename_to_tensors={weights_name: list(state_dict.keys())})

    # Save the model by shard
    os.makedirs(output_dir, exist_ok=True)
    filename_to_tensors = state_dict_split.filename_to_tensors.items()
    for shard_file, tensors in tqdm(filename_to_tensors, desc="Saving checkpoint shards"):
        shard_state_dict = {tensor_name: state_dict[tensor_name] for tensor_name in tensors}
        shard_state_dict = to_torch_tensor(shard_state_dict)
        output_path = os.path.join(output_dir, shard_file)
        if safe_serialization:
            save_file(shard_state_dict, output_path, metadata={"format": "pt"})
        else:
            torch.save(shard_state_dict, output_path)
        # release the memory of the current shard
        for tensor_name in list(shard_state_dict.keys()):
            del state_dict[tensor_name]
            del shard_state_dict[tensor_name]
        del shard_state_dict
        gc.collect()

    # Save index if sharded
    if state_dict_split.is_sharded:
        index = {
            "metadata": state_dict_split.metadata,
            "weight_map": state_dict_split.tensor_to_filename,
        }
        save_index_file = "model.safetensors.index.json" if safe_serialization else "pytorch_model.bin.index.json"
        save_index_file = os.path.join(output_dir, save_index_file)
        with open(save_index_file, "w", encoding="utf-8") as f:
            content = json.dumps(index, indent=2, sort_keys=True) + "\n"
            f.write(content)


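# A minimal illustrative sketch (not part of the upstream DeepSpeed script): converting
# a ZeRO checkpoint into sharded safetensors files programmatically instead of via the
# CLI. The paths are hypothetical, and `safetensors` plus `huggingface_hub` must be
# installed for these options to work.
def _example_convert_to_safetensors():
    convert_zero_checkpoint_to_fp32_state_dict("output/checkpoint-100",
                                               "output/checkpoint-100-fp32",
                                               max_shard_size="5GB",
                                               safe_serialization=True)

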
def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
    """
    1. Put the provided model on cpu
    2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
    3. Load it into the provided model

    Args:
        - ``model``: the model object to update
        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``

    Returns:
        - ``model``: modified model

    Make sure you have plenty of CPU memory available before you call this function. If you don't
    have enough, use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
    conveniently placed for you in the checkpoint folder.

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
        # submit to model hub or save the model to share with others

    Note that once this is run, the ``model`` will no longer be usable in the deepspeed context
    of the same application. i.e. you will need to re-initialize the deepspeed engine, since
    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.

    """
    logger.info(f"Extracting fp32 weights")
    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)

    logger.info(f"Overwriting model with fp32 weights")
    model = model.cpu()
    model.load_state_dict(state_dict, strict=False)

    return model


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("checkpoint_dir",
                        type=str,
                        help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
    parser.add_argument("output_dir",
                        type=str,
                        help="directory to the pytorch fp32 state_dict output files "
                        "(e.g. path/checkpoint-12-output/)")
    parser.add_argument(
        "--max_shard_size",
        type=str,
        default="5GB",
        help="The maximum size for a checkpoint before being sharded. Checkpoint shards will then each be "
        "smaller than this size. If expressed as a string, it needs to be digits followed by a unit (like `5MB`). "
        "We default it to 5GB so that models can run easily on free-tier Google Colab instances "
        "without CPU OOM issues.")
    parser.add_argument(
        "--safe_serialization",
        default=False,
        action='store_true',
        help="Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).")
    parser.add_argument("-t",
                        "--tag",
                        type=str,
                        default=None,
                        help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
    parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
    parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
    args = parser.parse_args()

    debug = args.debug

    convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
                                               args.output_dir,
                                               max_shard_size=args.max_shard_size,
                                               safe_serialization=args.safe_serialization,
                                               tag=args.tag,
                                               exclude_frozen_parameters=args.exclude_frozen_parameters)
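# Example invocation (paths are hypothetical); the script is typically run from inside
# the checkpoint folder that contains the 'latest' file, for instance:
#
#   python zero_to_fp32.py . output_fp32/ --safe_serialization --max_shard_size 5GB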