Commit 6fbe68c committed by EC2 Default User
Parent(s): 3421e92

search model artifacts
Files changed:
- 0_Transformer/config.json +19 -0
- 0_Transformer/pytorch_model.bin +3 -0
- 0_Transformer/sentence_bert_config.json +3 -0
- 0_Transformer/special_tokens_map.json +1 -0
- 0_Transformer/tokenizer_config.json +1 -0
- 0_Transformer/vocab.txt +0 -0
- 1_Pooling/config.json +7 -0
- accuracy_evaluation_results.csv +184 -0
- modules.json +14 -0
- train_loss_classifier.joblib +3 -0
0_Transformer/config.json
ADDED
@@ -0,0 +1,19 @@
+{
+  "architectures": [
+    "BertModel"
+  ],
+  "attention_probs_dropout_prob": 0.1,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 768,
+  "initializer_range": 0.02,
+  "intermediate_size": 3072,
+  "layer_norm_eps": 1e-12,
+  "max_position_embeddings": 512,
+  "model_type": "bert",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "pad_token_id": 0,
+  "type_vocab_size": 2,
+  "vocab_size": 30522
+}
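The config above describes a standard BERT-base architecture (12 layers, 12 heads, 768-dim hidden states, 30522-token vocabulary). A minimal sketch of loading the transformer weights with Hugging Face transformers, assuming the library is installed and the repo has been cloned locally (the local path is illustrative):

from transformers import BertModel

# Illustrative local path: the 0_Transformer directory from this commit.
model = BertModel.from_pretrained("./0_Transformer")
print(model.config.hidden_size)  # 768, matching "hidden_size" in the config above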
0_Transformer/pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:920711b2bf7e02cae02f9805e3abd0130ee7664b4b661c66b64e958495143797
+size 437976759
0_Transformer/sentence_bert_config.json
ADDED
@@ -0,0 +1,3 @@
+{
+  "max_seq_length": 128
+}
0_Transformer/special_tokens_map.json
ADDED
@@ -0,0 +1 @@
+{"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
0_Transformer/tokenizer_config.json
ADDED
@@ -0,0 +1 @@
+{}
0_Transformer/vocab.txt
ADDED
The diff for this file is too large to render; see the raw file.
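Together, vocab.txt, special_tokens_map.json, and the (empty) tokenizer_config.json are the standard WordPiece tokenizer assets, and sentence_bert_config.json caps inputs at 128 tokens. A sketch of loading them and truncating to that limit, again with an illustrative local path:

from transformers import BertTokenizer

tok = BertTokenizer.from_pretrained("./0_Transformer")  # picks up vocab.txt + special tokens
# Truncate to the 128-token limit declared in sentence_bert_config.json.
enc = tok("an example input sentence", truncation=True, max_length=128, return_tensors="pt")
print(enc["input_ids"].shape)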
1_Pooling/config.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "word_embedding_dimension": 768,
+  "pooling_mode_cls_token": false,
+  "pooling_mode_mean_tokens": true,
+  "pooling_mode_max_tokens": false,
+  "pooling_mode_mean_sqrt_len_tokens": false
+}
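This pooling config enables mean pooling only: token embeddings are averaged over real (non-padding) tokens to produce a single 768-dim sentence vector. A minimal sketch of that operation in PyTorch, assuming token_embeddings of shape (batch, seq, 768) and an attention_mask of shape (batch, seq):

import torch

def mean_pool(token_embeddings: torch.Tensor, attention_mask: torch.Tensor) -> torch.Tensor:
    # Zero out padding positions, then average over the real tokens only.
    mask = attention_mask.unsqueeze(-1).type_as(token_embeddings)  # (batch, seq, 1)
    summed = (token_embeddings * mask).sum(dim=1)                  # (batch, 768)
    counts = mask.sum(dim=1).clamp(min=1e-9)                       # avoid division by zero
    return summed / counts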
accuracy_evaluation_results.csv
ADDED
@@ -0,0 +1,184 @@
+epoch,steps,accuracy
+0,1000,0.5971326164874552
+0,2000,0.6616487455197133
+0,3000,0.6738351254480287
+0,4000,0.6659498207885305
+0,5000,0.6494623655913978
+0,6000,0.6688172043010753
+0,7000,0.7089605734767025
+0,8000,0.7060931899641577
+0,9000,0.7146953405017921
+0,10000,0.7111111111111111
+0,11000,0.7161290322580646
+0,12000,0.7103942652329749
+0,13000,0.7433691756272401
+0,14000,0.7383512544802867
+0,15000,0.7347670250896058
+0,16000,0.7161290322580646
+0,17000,0.7010752688172043
+0,18000,0.7376344086021506
+0,19000,0.7268817204301076
+0,20000,0.7577060931899642
+0,21000,0.7448028673835125
+0,22000,0.7362007168458782
+0,23000,0.7584229390681003
+0,24000,0.7627240143369176
+0,25000,0.7584229390681003
+0,26000,0.7548387096774194
+0,27000,0.7555555555555555
+0,28000,0.7555555555555555
+0,29000,0.7763440860215054
+0,30000,0.753405017921147
+0,31000,0.7663082437275985
+0,32000,0.7469534050179212
+0,33000,0.7756272401433691
+0,34000,0.7792114695340502
+0,35000,0.7863799283154121
+0,36000,0.7863799283154121
+0,37000,0.7842293906810036
+0,38000,0.771326164874552
+0,39000,0.7684587813620072
+0,40000,0.7842293906810036
+0,41000,0.771326164874552
+0,42000,0.7870967741935484
+0,43000,0.7971326164874551
+0,44000,0.7885304659498208
+0,45000,0.7899641577060932
+0,46000,0.7992831541218638
+0,47000,0.8021505376344086
+0,48000,0.7885304659498208
+0,49000,0.7870967741935484
+0,50000,0.7942652329749104
+0,51000,0.7906810035842294
+0,52000,0.7956989247311828
+0,53000,0.7978494623655914
+0,54000,0.8050179211469534
+0,55000,0.8
+0,56000,0.8050179211469534
+0,57000,0.8114695340501792
+0,58000,0.8093189964157707
+0,59000,0.8129032258064516
+0,60000,0.8078853046594983
+0,-1,0.8121863799283154
+1,1000,0.796415770609319
+1,2000,0.810752688172043
+1,3000,0.8100358422939068
+1,4000,0.8021505376344086
+1,5000,0.8014336917562724
+1,6000,0.8136200716845878
+1,7000,0.8121863799283154
+1,8000,0.814336917562724
+1,9000,0.8064516129032258
+1,10000,0.807168458781362
+1,11000,0.807168458781362
+1,12000,0.810752688172043
+1,13000,0.8021505376344086
+1,14000,0.8064516129032258
+1,15000,0.8064516129032258
+1,16000,0.8172043010752689
+1,17000,0.807168458781362
+1,18000,0.8114695340501792
+1,19000,0.803584229390681
+1,20000,0.810752688172043
+1,21000,0.8007168458781362
+1,22000,0.8150537634408602
+1,23000,0.7971326164874551
+1,24000,0.8057347670250896
+1,25000,0.8014336917562724
+1,26000,0.8028673835125448
+1,27000,0.8136200716845878
+1,28000,0.8172043010752689
+1,29000,0.8050179211469534
+1,30000,0.8136200716845878
+1,31000,0.8121863799283154
+1,32000,0.8150537634408602
+1,33000,0.8157706093189964
+1,34000,0.814336917562724
+1,35000,0.821505376344086
+1,36000,0.8064516129032258
+1,37000,0.8150537634408602
+1,38000,0.8157706093189964
+1,39000,0.8243727598566308
+1,40000,0.8186379928315413
+1,41000,0.8193548387096774
+1,42000,0.8086021505376344
+1,43000,0.8222222222222222
+1,44000,0.8207885304659498
+1,45000,0.8186379928315413
+1,46000,0.8193548387096774
+1,47000,0.8193548387096774
+1,48000,0.825089605734767
+1,49000,0.8229390681003584
+1,50000,0.817921146953405
+1,51000,0.8222222222222222
+1,52000,0.8150537634408602
+1,53000,0.8186379928315413
+1,54000,0.8136200716845878
+1,55000,0.8164874551971326
+1,56000,0.8207885304659498
+1,57000,0.821505376344086
+1,58000,0.814336917562724
+1,59000,0.8207885304659498
+1,60000,0.8193548387096774
+1,-1,0.817921146953405
+2,1000,0.8114695340501792
+2,2000,0.8164874551971326
+2,3000,0.8193548387096774
+2,4000,0.8129032258064516
+2,5000,0.8164874551971326
+2,6000,0.8136200716845878
+2,7000,0.8193548387096774
+2,8000,0.821505376344086
+2,9000,0.8186379928315413
+2,10000,0.8136200716845878
+2,11000,0.8121863799283154
+2,12000,0.8136200716845878
+2,13000,0.8207885304659498
+2,14000,0.8157706093189964
+2,15000,0.8172043010752689
+2,16000,0.8157706093189964
+2,17000,0.8243727598566308
+2,18000,0.825089605734767
+2,19000,0.828673835125448
+2,20000,0.821505376344086
+2,21000,0.825089605734767
+2,22000,0.821505376344086
+2,23000,0.825089605734767
+2,24000,0.825089605734767
+2,25000,0.8258064516129032
+2,26000,0.825089605734767
+2,27000,0.8293906810035843
+2,28000,0.8229390681003584
+2,29000,0.8193548387096774
+2,30000,0.8207885304659498
+2,31000,0.8229390681003584
+2,32000,0.814336917562724
+2,33000,0.8186379928315413
+2,34000,0.8186379928315413
+2,35000,0.8229390681003584
+2,36000,0.8222222222222222
+2,37000,0.814336917562724
+2,38000,0.8207885304659498
+2,39000,0.8186379928315413
+2,40000,0.817921146953405
+2,41000,0.8229390681003584
+2,42000,0.8172043010752689
+2,43000,0.825089605734767
+2,44000,0.8258064516129032
+2,45000,0.8193548387096774
+2,46000,0.8200716845878137
+2,47000,0.8258064516129032
+2,48000,0.821505376344086
+2,49000,0.8222222222222222
+2,50000,0.8157706093189964
+2,51000,0.821505376344086
+2,52000,0.8193548387096774
+2,53000,0.8200716845878137
+2,54000,0.8193548387096774
+2,55000,0.8207885304659498
+2,56000,0.8200716845878137
+2,57000,0.821505376344086
+2,58000,0.8222222222222222
+2,59000,0.8222222222222222
+2,60000,0.8229390681003584
+2,-1,0.8229390681003584
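The CSV logs evaluation accuracy per checkpoint (steps = -1 marks the end of an epoch); accuracy climbs from roughly 0.60 early in epoch 0 and plateaus around 0.82 by epoch 2. A quick sketch for locating the best checkpoint in this log, assuming pandas is available:

import pandas as pd

df = pd.read_csv("accuracy_evaluation_results.csv")
best = df.loc[df["accuracy"].idxmax()]
print(best)  # epoch 2, steps 27000, accuracy ~0.8294 per the rows above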
modules.json
ADDED
@@ -0,0 +1,14 @@
+[
+  {
+    "idx": 0,
+    "name": "0",
+    "path": "0_Transformer",
+    "type": "sentence_transformers.models.Transformer"
+  },
+  {
+    "idx": 1,
+    "name": "1",
+    "path": "1_Pooling",
+    "type": "sentence_transformers.models.Pooling"
+  }
+]
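modules.json is what lets sentence-transformers reassemble the pipeline: module 0 (the Transformer in 0_Transformer) feeds module 1 (the Pooling layer in 1_Pooling). A sketch of loading the full model from a local clone of this repo, with the path illustrative:

from sentence_transformers import SentenceTransformer

model = SentenceTransformer("./")  # reads modules.json, then 0_Transformer and 1_Pooling
embeddings = model.encode(["first sentence", "second sentence"])
print(embeddings.shape)  # (2, 768), given word_embedding_dimension above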
train_loss_classifier.joblib
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f453eb2eb70e5d3ff57ee1a75e8a93b1f4c16aa5f3fd8dd95795976ad74c5a9
+size 28766
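Like pytorch_model.bin, train_loss_classifier.joblib is stored via Git LFS, so only the pointer appears in the diff. Its exact contents are not recoverable from this commit; assuming it is a fitted scikit-learn classifier that consumes sentence embeddings, loading and using it would look roughly like this (a sketch, not confirmed by the source):

import joblib
from sentence_transformers import SentenceTransformer

clf = joblib.load("train_loss_classifier.joblib")  # assumption: a fitted sklearn estimator
model = SentenceTransformer("./")

emb = model.encode(["a sentence to classify"])
print(clf.predict(emb))  # assumption: the classifier takes 768-dim embeddings as input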