nazemi committed
Commit 2a9117c · 1 Parent(s): 665408d

Training in progress, epoch 1

config.json CHANGED
@@ -1,12 +1,12 @@
 {
-  "_name_or_path": "t5-base",
+  "_name_or_path": "t5-large",
   "architectures": [
     "T5ForConditionalGeneration"
   ],
   "classifier_dropout": 0.0,
-  "d_ff": 3072,
+  "d_ff": 4096,
   "d_kv": 64,
-  "d_model": 768,
+  "d_model": 1024,
   "decoder_start_token_id": 0,
   "dense_act_fn": "relu",
   "dropout_rate": 0.1,
@@ -18,9 +18,9 @@
   "layer_norm_epsilon": 1e-06,
   "model_type": "t5",
   "n_positions": 512,
-  "num_decoder_layers": 12,
-  "num_heads": 12,
-  "num_layers": 12,
+  "num_decoder_layers": 24,
+  "num_heads": 16,
+  "num_layers": 24,
   "output_past": true,
   "pad_token_id": 0,
   "relative_attention_max_distance": 128,
model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f29b98186e6dabd984698798ddb11c5962af20d6b90339140842292f67f89bf
+size 2950734544
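The pointer's size (about 2.95 GB) is consistent with fp32 weights for t5-large's roughly 738M parameters, which is another sign the base model changed. A rough arithmetic sketch, ignoring the small safetensors header:

```python
# Rough sanity check: LFS pointer size vs. t5-large parameter count in fp32.
size_bytes = 2_950_734_544      # "size" field from the LFS pointer above
bytes_per_param = 4             # fp32 weights
approx_params = size_bytes / bytes_per_param
print(f"~{approx_params / 1e6:.0f}M parameters")  # ~738M, i.e. t5-large scale
```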
runs/Jan16_20-44-57_192168021136.tpgi.com.au/events.out.tfevents.1705409099.192168021136.tpgi.com.au.2926.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b75c8cbce7a15dd20269dd5d1424c7d4b481e6f467424762d9199e57ae298123
+size 37024
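The added event file holds the Trainer's TensorBoard logs for this epoch. A sketch for inspecting it programmatically, assuming the tensorboard package is installed and the file has been pulled from LFS:

```python
# Sketch: list the scalar tags logged during epoch 1.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

run_dir = "runs/Jan16_20-44-57_192168021136.tpgi.com.au"  # directory added in this commit
acc = EventAccumulator(run_dir)
acc.Reload()                      # parse the events.out.tfevents.* file
print(acc.Tags()["scalars"])      # e.g. loss and learning-rate tags
```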
special_tokens_map.json CHANGED
@@ -101,25 +101,7 @@
   "<extra_id_98>",
   "<extra_id_99>"
 ],
-  "eos_token": {
-    "content": "</s>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": {
-    "content": "<pad>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "unk_token": {
-    "content": "<unk>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  }
+  "eos_token": "</s>",
+  "pad_token": "<pad>",
+  "unk_token": "<unk>"
 }
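The eos/pad/unk entries were collapsed from full AddedToken dictionaries to plain strings; transformers accepts either serialization and resolves them to the same special tokens. A quick check, assuming a local copy of this checkpoint at the hypothetical path "./checkpoint":

```python
# Sketch: confirm the simplified special_tokens_map.json still resolves correctly.
from transformers import T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained("./checkpoint")  # hypothetical local path
print(tokenizer.eos_token, tokenizer.pad_token, tokenizer.unk_token)
# Expected: </s> <pad> <unk>
```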
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:bd991f854fb46c64f1fa9a5ffa32a90d83bb8b496a802ef8b64581c6c21e1255
-size 2422519
+oid sha256:04e7404608459e95fe67c3cf18a4c34369e15375a86e918d13644e5c7314699f
+size 2422444
tokenizer_config.json CHANGED
@@ -930,10 +930,8 @@
   "clean_up_tokenization_spaces": true,
   "eos_token": "</s>",
   "extra_ids": 100,
-  "legacy": true,
   "model_max_length": 512,
   "pad_token": "<pad>",
-  "sp_model_kwargs": {},
   "tokenizer_class": "T5Tokenizer",
   "unk_token": "<unk>"
 }
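With the "legacy" and "sp_model_kwargs" keys dropped, the tokenizer falls back to the library defaults for both. If the previous behavior is ever needed, it can still be requested explicitly at load time; a sketch only, with a hypothetical local path, not something this repo prescribes:

```python
# Sketch: re-apply the removed settings explicitly when loading, if desired.
from transformers import T5Tokenizer

tokenizer = T5Tokenizer.from_pretrained(
    "./checkpoint",        # hypothetical local path to this checkpoint
    legacy=True,           # value the old tokenizer_config.json carried
    sp_model_kwargs={},    # likewise previously stored as an empty dict
)
```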
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:09de3c93a96f3eb996c13365796654b25c88d7f8959e7f4f10fc194a34430def
+oid sha256:24fc25fe6f4cc233921eee4e62238febbbcc0cad12117c39eae225089076b230
 size 4792
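training_args.bin is the pickled TrainingArguments object that the Trainer saves alongside the checkpoint, so the changed hash presumably reflects updated arguments for this run. A sketch for inspecting it, assuming the file has been pulled from LFS (weights_only=False is needed on newer torch versions because the file is a pickled Python object, not a tensor archive):

```python
# Sketch: inspect the TrainingArguments stored in training_args.bin.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)   # TrainingArguments
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)
```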