{
  "activation_dropout": 0.1,
  "apply_spec_augment": true,
  "architectures": [
    "UniSpeechSatModel"
  ],
  "attention_dropout": 0.1,
  "bos_token_id": 1,
  "classifier_proj_size": 256,
  "codevector_dim": 256,
  "contrastive_logits_temperature": 0.1,
  "conv_bias": false,
  "conv_dim": [
    32,
    32,
    32
  ],
  "conv_kernel": [
    8,
    8,
    8
  ],
  "conv_stride": [
    4,
    4,
    4
  ],
  "ctc_loss_reduction": "mean",
  "ctc_zero_infinity": false,
  "diversity_loss_weight": 0.1,
  "do_stable_layer_norm": false,
  "eos_token_id": 2,
  "feat_extract_activation": "gelu",
  "feat_extract_dropout": 0.0,
  "feat_extract_norm": "group",
  "feat_proj_dropout": 0.0,
  "feat_quantizer_dropout": 0.0,
  "final_dropout": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout": 0.1,
  "hidden_dropout_prob": 0.1,
  "hidden_size": 16,
  "initializer_range": 0.02,
  "intermediate_size": 20,
  "layer_norm_eps": 1e-05,
  "layerdrop": 0.1,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 2,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.5,
  "model_type": "unispeech-sat",
  "num_attention_heads": 2,
  "num_clusters": 504,
  "num_codevector_groups": 2,
  "num_codevectors_per_group": 320,
  "num_conv_pos_embedding_groups": 2,
  "num_conv_pos_embeddings": 16,
  "num_feat_extract_layers": 3,
  "num_hidden_layers": 4,
  "num_negatives": 100,
  "pad_token_id": 0,
  "proj_codevector_dim": 256,
  "tdnn_dilation": [
    1,
    1
  ],
  "tdnn_dim": [
    32,
    32
  ],
  "tdnn_kernel": [
    3,
    3
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.28.0.dev0",
  "use_weighted_layer_sum": false,
  "vocab_size": 32,
  "xvector_output_dim": 32
}