joelniklaus (HF Staff) committed

Commit 0a2c663 · 1 Parent(s): 698e519

added model files
README.md ADDED
@@ -0,0 +1,9 @@
+ # bert-base-uncased-sem_eval_2010_task_8
+
+ Trained for 3 epochs
+
+ Batch-size: 6
+
+ Seed: 42
+
+ Test F1-Score: 0.8
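
A minimal usage sketch for the model described in this README, assuming the files in this commit are published under a Hub repo id like `joelito/bert-base-uncased-sem_eval_2010_task_8` (the exact repo id and the `<e1>`/`<e2>` entity-marker input format are assumptions, not stated in this commit):

```python
# Minimal inference sketch; the repo id and entity-marker format are assumptions.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "joelito/bert-base-uncased-sem_eval_2010_task_8"  # hypothetical repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# SemEval-2010 Task 8 sentences mark the two candidate entities with <e1>/<e2> tags.
sentence = "The <e1>author</e1> of the <e2>book</e2> received an award."
inputs = tokenizer(sentence, return_tensors="pt", truncation=True)

with torch.no_grad():
    logits = model(**inputs).logits

predicted_id = logits.argmax(dim=-1).item()
print(model.config.id2label[predicted_id])  # one of the 19 relation labels
```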
config.json ADDED
@@ -0,0 +1,64 @@
+ {
+ "_name_or_path": "/home/joel/transformers/sem_eval_2010_task_8/bert-base-uncased-local/results/42/checkpoint-1700",
+ "architectures": [
+ "BertForSequenceClassification"
+ ],
+ "attention_probs_dropout_prob": 0.1,
+ "finetuning_task": "joelito/sem_eval_2010_task_8",
+ "gradient_checkpointing": false,
+ "hidden_act": "gelu",
+ "hidden_dropout_prob": 0.1,
+ "hidden_size": 768,
+ "id2label": {
+ "0": "Cause-Effect(e1,e2)",
+ "1": "Cause-Effect(e2,e1)",
+ "2": "Component-Whole(e1,e2)",
+ "3": "Component-Whole(e2,e1)",
+ "4": "Content-Container(e1,e2)",
+ "5": "Content-Container(e2,e1)",
+ "6": "Entity-Destination(e1,e2)",
+ "7": "Entity-Destination(e2,e1)",
+ "8": "Entity-Origin(e1,e2)",
+ "9": "Entity-Origin(e2,e1)",
+ "10": "Instrument-Agency(e1,e2)",
+ "11": "Instrument-Agency(e2,e1)",
+ "12": "Member-Collection(e1,e2)",
+ "13": "Member-Collection(e2,e1)",
+ "14": "Message-Topic(e1,e2)",
+ "15": "Message-Topic(e2,e1)",
+ "16": "Product-Producer(e1,e2)",
+ "17": "Product-Producer(e2,e1)",
+ "18": "Other"
+ },
+ "initializer_range": 0.02,
+ "intermediate_size": 3072,
+ "label2id": {
+ "Cause-Effect(e1,e2)": 0,
+ "Cause-Effect(e2,e1)": 1,
+ "Component-Whole(e1,e2)": 2,
+ "Component-Whole(e2,e1)": 3,
+ "Content-Container(e1,e2)": 4,
+ "Content-Container(e2,e1)": 5,
+ "Entity-Destination(e1,e2)": 6,
+ "Entity-Destination(e2,e1)": 7,
+ "Entity-Origin(e1,e2)": 8,
+ "Entity-Origin(e2,e1)": 9,
+ "Instrument-Agency(e1,e2)": 10,
+ "Instrument-Agency(e2,e1)": 11,
+ "Member-Collection(e1,e2)": 12,
+ "Member-Collection(e2,e1)": 13,
+ "Message-Topic(e1,e2)": 14,
+ "Message-Topic(e2,e1)": 15,
+ "Other": 18,
+ "Product-Producer(e1,e2)": 16,
+ "Product-Producer(e2,e1)": 17
+ },
+ "layer_norm_eps": 1e-12,
+ "max_position_embeddings": 512,
+ "model_type": "bert",
+ "num_attention_heads": 12,
+ "num_hidden_layers": 12,
+ "pad_token_id": 0,
+ "type_vocab_size": 2,
+ "vocab_size": 30522
+ }
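
As a small illustration of how the `id2label` / `label2id` maps above are typically used, here is a hedged sketch that loads only this config; the local path is an assumption and could equally be a Hub repo id:

```python
# Sketch: inspect the label maps defined in config.json; the path is an assumption.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./bert-base-uncased-sem_eval_2010_task_8")

print(config.num_labels)         # 19 classes: 18 directed relations plus "Other"
print(config.id2label[0])        # "Cause-Effect(e1,e2)"
print(config.label2id["Other"])  # 18
```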
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0b1c430304687c904a3b4b293b2f448553fad11403403e41cad7a3d1f5849a32
+ size 438076745
special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tf_model.h5 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7b8eb7d41de69723c8cf3af03ce891a5265a7f3f94eff730f473ee7a470dcd3
+ size 438258092
tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "do_basic_tokenize": true, "never_split": null, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "tokenize_chinese_chars": true, "strip_accents": null, "model_max_length": 512, "name_or_path": "bert-base-uncased"}
vocab.txt ADDED
The diff for this file is too large to render.