Transformers
PyTorch
English
wav2vec2
pretraining
speech
patrickvonplaten committed on
Commit
f23233e
·
1 Parent(s): bbc397a
Files changed (2) hide show
  1. config.json +4 -5
  2. pytorch_model.bin +2 -2
config.json CHANGED
@@ -6,7 +6,7 @@
6
  ],
7
  "attention_dropout": 0.1,
8
  "bos_token_id": 1,
9
- "conv_bias": true,
10
  "conv_dim": [
11
  512,
12
  512,
@@ -36,15 +36,14 @@
36
  ],
37
  "ctc_loss_reduction": "sum",
38
  "ctc_zero_infinity": false,
39
- "do_stable_layer_norm": true,
40
  "eos_token_id": 2,
41
  "feat_extract_activation": "gelu",
42
  "feat_extract_dropout": 0.0,
43
- "feat_extract_norm": "layer",
44
  "feat_proj_dropout": 0.1,
45
  "final_dropout": 0.1,
46
- "freeze_feat_extract_train": true,
47
- "gradient_checkpointing": true,
48
  "hidden_act": "gelu",
49
  "hidden_dropout": 0.1,
50
  "hidden_dropout_prob": 0.1,
 
6
  ],
7
  "attention_dropout": 0.1,
8
  "bos_token_id": 1,
9
+ "conv_bias": false,
10
  "conv_dim": [
11
  512,
12
  512,
 
36
  ],
37
  "ctc_loss_reduction": "sum",
38
  "ctc_zero_infinity": false,
39
+ "do_stable_layer_norm": false,
40
  "eos_token_id": 2,
41
  "feat_extract_activation": "gelu",
42
  "feat_extract_dropout": 0.0,
43
+ "feat_extract_norm": "group",
44
  "feat_proj_dropout": 0.1,
45
  "final_dropout": 0.1,
46
+ "gradient_checkpointing": false,
 
47
  "hidden_act": "gelu",
48
  "hidden_dropout": 0.1,
49
  "hidden_dropout_prob": 0.1,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d561761277e7943efeff2411722dcc4e5f9f552eee9a96674923528654d4a2e4
3
- size 1261920069
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:19d5205f3b520d9ff2e982d5aed82bf7d9d855ddb78a64c11fc1b33569b1c05f
3
+ size 1261874330