Model configuration (commit f5c7948, 814 bytes):

								attention_logit_softcapping: null
attention_scores_scalar: null
attn_bias: false
bias: false
block_size: 131072
final_logit_softcapping: null
gelu_approximate: none
head_size: 64
hf_config: {}
intermediate_size: 1536
lm_head_bias: false
mlp_class_name: LLaMAMLP
n_embd: 576
n_expert: 0
n_expert_per_token: 0
n_head: 9
n_layer: 32
n_query_groups: 3
name: tangled-alpha-0.14-base
norm_class_name: RMSNorm
norm_eps: 1.0e-05
norm_qk: false
padded_vocab_size: 65536
padding_multiple: 512
parallel_residual: false
post_attention_norm: false
post_mlp_norm: false
rope_adjustments: null
rope_base: 84000
rope_condense_ratio: 1
rope_indices: null
rope_local_base_freq: null
rotary_percentage: 1.0
scale_embeddings: false
shared_attention_norm: false
sliding_window_indices: null
sliding_window_size: null
vocab_size: 65536
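
To illustrate how a configuration like this might be consumed, here is a minimal sketch that loads it with PyYAML and checks the attention geometry it implies: n_embd (576) equals n_head (9) times head_size (64), and n_head is divisible by n_query_groups (3) for grouped-query attention. The filename model_config.yaml and the use of PyYAML are assumptions for illustration, not part of the file above.

import yaml

# Assumption: the configuration above has been saved locally as "model_config.yaml".
with open("model_config.yaml") as f:
    cfg = yaml.safe_load(f)

# Sanity checks on the values in the file:
# the embedding width must match heads * head size,
# and the query heads must split evenly into KV groups.
assert cfg["n_embd"] == cfg["n_head"] * cfg["head_size"]
assert cfg["n_head"] % cfg["n_query_groups"] == 0

print(f"{cfg['name']}: {cfg['n_layer']} layers, "
      f"{cfg['n_head']} heads ({cfg['n_query_groups']} KV groups), "
      f"hidden size {cfg['n_embd']}, vocab {cfg['vocab_size']}")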