{
  "_name_or_path": "tiny_models/xglm/XGLMForCausalLM",
  "activation_dropout": 0.1,
  "activation_function": "gelu",
  "architectures": [
    "XGLMForCausalLM"
  ],
  "attention_dropout": 0.1,
  "attention_heads": 4,
  "bos_token_id": 0,
  "d_model": 32,
  "decoder_start_token_id": 2,
  "dropout": 0.1,
  "eos_token_id": 2,
  "ffn_dim": 37,
  "gradient_checkpointing": false,
  "init_std": 0.02,
  "initializer_range": 0.02,
  "is_decoder": true,
  "layerdrop": 0.0,
  "max_position_embeddings": 512,
  "model_type": "xglm",
  "num_layers": 5,
  "pad_token_id": 1,
  "scale_embedding": true,
  "torch_dtype": "float32",
  "transformers_version": "4.28.0.dev0",
  "use_cache": true,
  "vocab_size": 256008
}
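
A minimal sketch of using this configuration with the transformers library, assuming the JSON above is saved as config.json in the working directory (the "_name_or_path" value is only metadata and need not exist locally); the model is built with randomly initialized weights matching these dimensions:

from transformers import XGLMConfig, XGLMForCausalLM

# Load the tiny XGLM configuration from the JSON file shown above.
config = XGLMConfig.from_json_file("config.json")

# Instantiate a randomly initialized causal LM with these dimensions
# (5 layers, d_model=32, 4 attention heads, vocab_size=256008).
model = XGLMForCausalLM(config)
print(model.num_parameters())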