Upload 5 files

- .gitattributes +1 -0
- dev.tsv +3 -0
- loss.tsv +21 -0
- test.tsv +0 -0
- training.log +491 -0
- weights.txt +0 -0
    	
.gitattributes
CHANGED

@@ -30,3 +30,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+dev.tsv filter=lfs diff=lfs merge=lfs -text
    	
dev.tsv
ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:92f892c2879578f5b2652a0f0b22c97409b93dfc4526e0746c224cad4de177ff
+size 16231537
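dev.tsv is tracked with Git LFS (per the .gitattributes change above), so the commit only stores this small pointer; the real ~16 MB TSV lives in LFS storage, addressed by the sha256 oid. A minimal sketch of reading such a pointer file, assuming only the simple "key value" layout shown above (version, oid, size):

```python
# Parse a git-lfs pointer file into a dict (sketch; assumes the plain
# "key value" lines shown in the diff above).
def read_lfs_pointer(path: str) -> dict:
    fields = {}
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            key, _, value = line.partition(" ")
            fields[key] = value
    return fields

pointer = read_lfs_pointer("dev.tsv")
print(pointer["oid"])   # sha256:92f892c2...
print(pointer["size"])  # 16231537
```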
    	
loss.tsv
ADDED

@@ -0,0 +1,21 @@
+EPOCH	TIMESTAMP	BAD_EPOCHS	LEARNING_RATE	TRAIN_LOSS	DEV_LOSS	DEV_PRECISION	DEV_RECALL	DEV_F1	DEV_ACCURACY
+1	16:13:31	4	0.0000	0.5167920140669576	0.07261496782302856	0.6999	0.7008	0.7003	0.5529
+2	17:48:56	4	0.0000	0.19001624853523155	0.019461628049612045	0.9127	0.9391	0.9258	0.8711
+3	19:24:46	4	0.0000	0.17175931267209962	0.0132540138438344	0.9353	0.9548	0.9449	0.9035
+4	21:00:51	4	0.0000	0.1651708900129011	0.011177059262990952	0.9487	0.9583	0.9535	0.9179
+5	22:35:13	4	0.0000	0.16130743654362578	0.011488113552331924	0.9462	0.9631	0.9546	0.9199
+6	00:10:46	4	0.0000	0.15800901282123106	0.010615515522658825	0.9527	0.9649	0.9587	0.9273
+7	01:46:49	4	0.0000	0.1551098387415943	0.010866315104067326	0.9523	0.9673	0.9597	0.9289
+8	03:22:53	4	0.0000	0.1532771505682582	0.010451321490108967	0.9578	0.9657	0.9617	0.9324
+9	04:58:17	4	0.0000	0.15132318660084354	0.01064694207161665	0.9543	0.9677	0.9609	0.9309
+10	06:33:35	4	0.0000	0.14953294716354223	0.010687584988772869	0.9559	0.9689	0.9623	0.9336
+11	08:08:47	4	0.0000	0.14839159017834289	0.010935463011264801	0.9559	0.9683	0.962	0.9329
+12	09:43:47	4	0.0000	0.14699742678882574	0.011056484654545784	0.9587	0.9682	0.9634	0.9355
+13	11:14:57	4	0.0000	0.14604769509499	0.011409671977162361	0.9569	0.9687	0.9628	0.9342
+14	12:44:53	4	0.0000	0.14518390266473472	0.011419754475355148	0.9577	0.9697	0.9637	0.936
+15	14:24:07	4	0.0000	0.1443252341730405	0.011627680622041225	0.9582	0.9693	0.9637	0.9359
+16	16:05:36	4	0.0000	0.1435298321123121	0.011783876456320286	0.9601	0.9688	0.9644	0.9373
+17	17:46:47	4	0.0000	0.14356430696292566	0.011797642335295677	0.9595	0.9691	0.9643	0.9371
+18	19:28:18	4	0.0000	0.1423554343151879	0.011939478106796741	0.9588	0.9693	0.964	0.9365
+19	21:09:43	4	0.0000	0.14288157071177124	0.012016847729682922	0.9594	0.9692	0.9643	0.937
+20	22:51:01	4	0.0000	0.14238437937592288	0.012119622901082039	0.9589	0.9691	0.964	0.9366
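The per-epoch metrics above are easier to inspect programmatically. A quick sketch, assuming pandas is available, that loads loss.tsv and reports the best dev F1:

```python
import pandas as pd

# loss.tsv is tab-separated; the header row gives the column names used below.
df = pd.read_csv("loss.tsv", sep="\t")

best = df.loc[df["DEV_F1"].idxmax()]
print(f"best dev F1 {best['DEV_F1']:.4f} at epoch {int(best['EPOCH'])}")  # 0.9644 at epoch 16

# Dev loss bottoms out at epoch 8 and rises again afterwards while F1 keeps
# creeping up, so it is worth looking at both columns, not just the loss.
print(df[["EPOCH", "TRAIN_LOSS", "DEV_LOSS", "DEV_F1"]].tail())
```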
    	
test.tsv
ADDED

The diff for this file is too large to render. See the raw file instead.
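Since the test split is only viewable as the raw file, one way to pull it locally is through huggingface_hub. This is a sketch: the repo id below is a placeholder for whichever repository this commit belongs to.

```python
from huggingface_hub import hf_hub_download

# "your-user/your-repo" is a placeholder -- substitute the repository this commit lives in.
path = hf_hub_download(repo_id="your-user/your-repo", filename="test.tsv")

# Peek at the first few lines; dev.tsv/test.tsv feed a Flair ColumnCorpus,
# so expect one token per line with its tag and blank lines between sentences.
with open(path, encoding="utf-8") as f:
    for _ in range(10):
        print(f.readline().rstrip())
```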
    	
training.log
ADDED

@@ -0,0 +1,491 @@
+2022-10-09 14:35:47,018 ----------------------------------------------------------------------------------------------------
+2022-10-09 14:35:47,019 Model: "SequenceTagger(
+  (embeddings): StackedEmbeddings(
+    (list_embedding_0): TransformerWordEmbeddings(
+      (model): DistilBertModel(
+        (embeddings): Embeddings(
+          (word_embeddings): Embedding(28996, 768, padding_idx=0)
+          (position_embeddings): Embedding(512, 768)
+          (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+          (dropout): Dropout(p=0.1, inplace=False)
+        )
+        (transformer): Transformer(
+          (layer): ModuleList(
+            (0): TransformerBlock(
+              (attention): MultiHeadSelfAttention(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (q_lin): Linear(in_features=768, out_features=768, bias=True)
+                (k_lin): Linear(in_features=768, out_features=768, bias=True)
+                (v_lin): Linear(in_features=768, out_features=768, bias=True)
+                (out_lin): Linear(in_features=768, out_features=768, bias=True)
+              )
+              (sa_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (ffn): FFN(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (lin1): Linear(in_features=768, out_features=3072, bias=True)
+                (lin2): Linear(in_features=3072, out_features=768, bias=True)
+                (activation): GELUActivation()
+              )
+              (output_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            )
+            (1): TransformerBlock(
+              (attention): MultiHeadSelfAttention(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (q_lin): Linear(in_features=768, out_features=768, bias=True)
+                (k_lin): Linear(in_features=768, out_features=768, bias=True)
+                (v_lin): Linear(in_features=768, out_features=768, bias=True)
+                (out_lin): Linear(in_features=768, out_features=768, bias=True)
+              )
+              (sa_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (ffn): FFN(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (lin1): Linear(in_features=768, out_features=3072, bias=True)
+                (lin2): Linear(in_features=3072, out_features=768, bias=True)
+                (activation): GELUActivation()
+              )
+              (output_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            )
+            (2): TransformerBlock(
+              (attention): MultiHeadSelfAttention(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (q_lin): Linear(in_features=768, out_features=768, bias=True)
+                (k_lin): Linear(in_features=768, out_features=768, bias=True)
+                (v_lin): Linear(in_features=768, out_features=768, bias=True)
+                (out_lin): Linear(in_features=768, out_features=768, bias=True)
+              )
+              (sa_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (ffn): FFN(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (lin1): Linear(in_features=768, out_features=3072, bias=True)
+                (lin2): Linear(in_features=3072, out_features=768, bias=True)
+                (activation): GELUActivation()
+              )
+              (output_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            )
+            (3): TransformerBlock(
+              (attention): MultiHeadSelfAttention(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (q_lin): Linear(in_features=768, out_features=768, bias=True)
+                (k_lin): Linear(in_features=768, out_features=768, bias=True)
+                (v_lin): Linear(in_features=768, out_features=768, bias=True)
+                (out_lin): Linear(in_features=768, out_features=768, bias=True)
+              )
+              (sa_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (ffn): FFN(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (lin1): Linear(in_features=768, out_features=3072, bias=True)
+                (lin2): Linear(in_features=3072, out_features=768, bias=True)
+                (activation): GELUActivation()
+              )
+              (output_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            )
+            (4): TransformerBlock(
+              (attention): MultiHeadSelfAttention(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (q_lin): Linear(in_features=768, out_features=768, bias=True)
+                (k_lin): Linear(in_features=768, out_features=768, bias=True)
+                (v_lin): Linear(in_features=768, out_features=768, bias=True)
+                (out_lin): Linear(in_features=768, out_features=768, bias=True)
+              )
+              (sa_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (ffn): FFN(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (lin1): Linear(in_features=768, out_features=3072, bias=True)
+                (lin2): Linear(in_features=3072, out_features=768, bias=True)
+                (activation): GELUActivation()
+              )
+              (output_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            )
+            (5): TransformerBlock(
+              (attention): MultiHeadSelfAttention(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (q_lin): Linear(in_features=768, out_features=768, bias=True)
+                (k_lin): Linear(in_features=768, out_features=768, bias=True)
+                (v_lin): Linear(in_features=768, out_features=768, bias=True)
+                (out_lin): Linear(in_features=768, out_features=768, bias=True)
+              )
+              (sa_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+              (ffn): FFN(
+                (dropout): Dropout(p=0.1, inplace=False)
+                (lin1): Linear(in_features=768, out_features=3072, bias=True)
+                (lin2): Linear(in_features=3072, out_features=768, bias=True)
+                (activation): GELUActivation()
+              )
+              (output_layer_norm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)
+            )
+          )
+        )
+      )
+    )
+  )
+  (word_dropout): WordDropout(p=0.05)
+  (locked_dropout): LockedDropout(p=0.5)
+  (linear): Linear(in_features=768, out_features=21, bias=True)
+  (loss_function): CrossEntropyLoss()
+)"
+2022-10-09 14:35:47,020 ----------------------------------------------------------------------------------------------------
+2022-10-09 14:35:47,020 Corpus: "MultiCorpus: 126439 train + 28967 dev + 17625 test sentences
+ - ColumnCorpus Corpus: 14896 train + 3444 dev + 3679 test sentences - ./
+ - ColumnCorpus Corpus: 1491 train + 166 dev + 184 test sentences - ./
+ - ColumnCorpus Corpus: 65087 train + 18419 dev + 9176 test sentences - ./datasets
+ - ColumnCorpus Corpus: 44965 train + 6938 dev + 4586 test sentences - ./"
+2022-10-09 14:35:47,020 ----------------------------------------------------------------------------------------------------
+2022-10-09 14:35:47,020 Parameters:
+2022-10-09 14:35:47,020  - learning_rate: "0.000005"
+2022-10-09 14:35:47,020  - mini_batch_size: "32"
+2022-10-09 14:35:47,020  - patience: "3"
+2022-10-09 14:35:47,020  - anneal_factor: "0.5"
+2022-10-09 14:35:47,020  - max_epochs: "20"
+2022-10-09 14:35:47,020  - shuffle: "True"
+2022-10-09 14:35:47,020  - train_with_dev: "False"
+2022-10-09 14:35:47,021  - batch_growth_annealing: "False"
+2022-10-09 14:35:47,021 ----------------------------------------------------------------------------------------------------
+2022-10-09 14:35:47,021 Model training base path: "resources/taggers/privy-flair-transformers"
+2022-10-09 14:35:47,021 ----------------------------------------------------------------------------------------------------
+2022-10-09 14:35:47,021 Device: cuda:0
+2022-10-09 14:35:47,021 ----------------------------------------------------------------------------------------------------
+2022-10-09 14:35:47,021 Embeddings storage mode: none
+2022-10-09 14:35:47,021 ----------------------------------------------------------------------------------------------------
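For orientation, the module tree and hyperparameters printed above map roughly onto the following Flair setup before the epoch-by-epoch logs begin. This is a sketch reconstructed from the log, not the original training script: the corpus files, column map and label type are placeholders, "distilbert-base-cased" is inferred from the 28996-token vocabulary in the printout, and the learning-rate warm-up visible below is not something a plain train() call sets up on its own.

```python
from flair.data import MultiCorpus
from flair.datasets import ColumnCorpus
from flair.embeddings import StackedEmbeddings, TransformerWordEmbeddings
from flair.models import SequenceTagger
from flair.trainers import ModelTrainer

# Placeholder corpora: the log lists four ColumnCorpus objects under ./ and
# ./datasets, but their file names and column map are not shown here.
columns = {0: "text", 1: "ner"}
corpus = MultiCorpus([
    ColumnCorpus("./", columns, train_file="train.tsv",
                 dev_file="dev.tsv", test_file="test.tsv"),
    # ... three more ColumnCorpus objects in the real run
])

# DistilBERT word embeddings wrapped in StackedEmbeddings, matching the printed
# module tree (a single list_embedding_0 of type TransformerWordEmbeddings).
embeddings = StackedEmbeddings([
    TransformerWordEmbeddings("distilbert-base-cased"),
])

# Linear head on top of the embeddings with word/locked dropout and no CRF or
# RNN, as in the printout; the 21 output classes come from the label dictionary.
tag_dictionary = corpus.make_label_dictionary(label_type="ner")
tagger = SequenceTagger(
    hidden_size=256,          # unused when use_rnn=False
    embeddings=embeddings,
    tag_dictionary=tag_dictionary,
    tag_type="ner",
    use_crf=False,
    use_rnn=False,
)

# Hyperparameters as printed in the Parameters block above.
trainer = ModelTrainer(tagger, corpus)
trainer.train(
    "resources/taggers/privy-flair-transformers",
    learning_rate=0.000005,
    mini_batch_size=32,
    max_epochs=20,
    patience=3,
    anneal_factor=0.5,
    embeddings_storage_mode="none",
)
```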
| 149 | 
            +
            2022-10-09 14:41:45,282 epoch 1 - iter 395/3952 - loss 3.32419044 - samples/sec: 35.64 - lr: 0.000000
         | 
| 150 | 
            +
            2022-10-09 14:50:42,225 epoch 1 - iter 790/3952 - loss 1.82346877 - samples/sec: 24.23 - lr: 0.000000
         | 
| 151 | 
            +
            2022-10-09 15:00:44,300 epoch 1 - iter 1185/3952 - loss 1.06483796 - samples/sec: 21.66 - lr: 0.000001
         | 
| 152 | 
            +
            2022-10-09 15:10:53,476 epoch 1 - iter 1580/3952 - loss 0.79311831 - samples/sec: 21.46 - lr: 0.000001
         | 
| 153 | 
            +
            2022-10-09 15:20:53,647 epoch 1 - iter 1975/3952 - loss 0.65220017 - samples/sec: 21.79 - lr: 0.000001
         | 
| 154 | 
            +
            2022-10-09 15:30:48,260 epoch 1 - iter 2370/3952 - loss 0.56201630 - samples/sec: 21.92 - lr: 0.000001
         | 
| 155 | 
            +
            2022-10-09 15:38:37,611 epoch 1 - iter 2765/3952 - loss 0.53726885 - samples/sec: 27.75 - lr: 0.000002
         | 
| 156 | 
            +
            2022-10-09 15:44:53,320 epoch 1 - iter 3160/3952 - loss 0.53328468 - samples/sec: 34.63 - lr: 0.000002
         | 
| 157 | 
            +
            2022-10-09 15:51:16,972 epoch 1 - iter 3555/3952 - loss 0.52470503 - samples/sec: 33.80 - lr: 0.000002
         | 
| 158 | 
            +
            2022-10-09 15:57:35,830 epoch 1 - iter 3950/3952 - loss 0.51681052 - samples/sec: 34.26 - lr: 0.000002
         | 
| 159 | 
            +
            2022-10-09 15:57:37,275 ----------------------------------------------------------------------------------------------------
         | 
| 160 | 
            +
            2022-10-09 15:57:37,275 EPOCH 1 done: loss 0.5168 - lr 0.000002
         | 
| 161 | 
            +
            2022-10-09 16:12:59,483 Evaluating as a multi-label problem: False
         | 
| 162 | 
            +
            2022-10-09 16:12:59,975 DEV : loss 0.07261496782302856 - f1-score (micro avg)  0.7003
         | 
| 163 | 
            +
            2022-10-09 16:13:31,047 BAD EPOCHS (no improvement): 4
         | 
| 164 | 
            +
            2022-10-09 16:13:31,940 ----------------------------------------------------------------------------------------------------
         | 
| 165 | 
            +
            2022-10-09 16:21:39,781 epoch 2 - iter 395/3952 - loss 0.21218652 - samples/sec: 26.89 - lr: 0.000003
         | 
| 166 | 
            +
            2022-10-09 16:29:46,206 epoch 2 - iter 790/3952 - loss 0.20655105 - samples/sec: 26.79 - lr: 0.000003
         | 
| 167 | 
            +
            2022-10-09 16:37:52,936 epoch 2 - iter 1185/3952 - loss 0.20259102 - samples/sec: 26.73 - lr: 0.000003
         | 
| 168 | 
            +
            2022-10-09 16:45:58,507 epoch 2 - iter 1580/3952 - loss 0.20005535 - samples/sec: 26.81 - lr: 0.000003
         | 
| 169 | 
            +
            2022-10-09 16:53:52,122 epoch 2 - iter 1975/3952 - loss 0.19747189 - samples/sec: 27.49 - lr: 0.000004
         | 
| 170 | 
            +
            2022-10-09 17:01:39,634 epoch 2 - iter 2370/3952 - loss 0.19566392 - samples/sec: 27.76 - lr: 0.000004
         | 
| 171 | 
            +
            2022-10-09 17:09:30,471 epoch 2 - iter 2765/3952 - loss 0.19386487 - samples/sec: 27.73 - lr: 0.000004
         | 
| 172 | 
            +
            2022-10-09 17:17:22,096 epoch 2 - iter 3160/3952 - loss 0.19249352 - samples/sec: 27.64 - lr: 0.000004
         | 
| 173 | 
            +
            2022-10-09 17:25:14,518 epoch 2 - iter 3555/3952 - loss 0.19133590 - samples/sec: 27.53 - lr: 0.000005
         | 
| 174 | 
            +
            2022-10-09 17:33:11,367 epoch 2 - iter 3950/3952 - loss 0.19002868 - samples/sec: 27.17 - lr: 0.000005
         | 
| 175 | 
            +
            2022-10-09 17:33:13,051 ----------------------------------------------------------------------------------------------------
         | 
| 176 | 
            +
            2022-10-09 17:33:13,051 EPOCH 2 done: loss 0.1900 - lr 0.000005
         | 
| 177 | 
            +
            2022-10-09 17:48:24,882 Evaluating as a multi-label problem: False
         | 
| 178 | 
            +
            2022-10-09 17:48:25,337 DEV : loss 0.019461628049612045 - f1-score (micro avg)  0.9258
         | 
| 179 | 
            +
            2022-10-09 17:48:56,512 BAD EPOCHS (no improvement): 4
         | 
| 180 | 
            +
            2022-10-09 17:48:57,426 ----------------------------------------------------------------------------------------------------
         | 
| 181 | 
            +
            2022-10-09 17:57:02,355 epoch 3 - iter 395/3952 - loss 0.17847621 - samples/sec: 26.73 - lr: 0.000005
         | 
| 182 | 
            +
            2022-10-09 18:05:03,813 epoch 3 - iter 790/3952 - loss 0.17591453 - samples/sec: 27.01 - lr: 0.000005
         | 
| 183 | 
            +
            2022-10-09 18:13:04,328 epoch 3 - iter 1185/3952 - loss 0.17539734 - samples/sec: 27.16 - lr: 0.000005
         | 
| 184 | 
            +
            2022-10-09 18:21:04,749 epoch 3 - iter 1580/3952 - loss 0.17471956 - samples/sec: 27.12 - lr: 0.000005
         | 
| 185 | 
            +
            2022-10-09 18:29:08,781 epoch 3 - iter 1975/3952 - loss 0.17411210 - samples/sec: 26.88 - lr: 0.000005
         | 
| 186 | 
            +
            2022-10-09 18:37:08,694 epoch 3 - iter 2370/3952 - loss 0.17357470 - samples/sec: 27.08 - lr: 0.000005
         | 
| 187 | 
            +
            2022-10-09 18:45:04,417 epoch 3 - iter 2765/3952 - loss 0.17312875 - samples/sec: 27.38 - lr: 0.000005
         | 
| 188 | 
            +
            2022-10-09 18:53:02,063 epoch 3 - iter 3160/3952 - loss 0.17256571 - samples/sec: 27.23 - lr: 0.000005
         | 
| 189 | 
            +
            2022-10-09 19:01:03,502 epoch 3 - iter 3555/3952 - loss 0.17219397 - samples/sec: 26.99 - lr: 0.000005
         | 
| 190 | 
            +
            2022-10-09 19:09:01,525 epoch 3 - iter 3950/3952 - loss 0.17175673 - samples/sec: 27.18 - lr: 0.000005
         | 
| 191 | 
            +
            2022-10-09 19:09:02,937 ----------------------------------------------------------------------------------------------------
         | 
| 192 | 
            +
            2022-10-09 19:09:02,938 EPOCH 3 done: loss 0.1718 - lr 0.000005
         | 
| 193 | 
            +
            2022-10-09 19:24:14,202 Evaluating as a multi-label problem: False
         | 
| 194 | 
            +
            2022-10-09 19:24:14,636 DEV : loss 0.0132540138438344 - f1-score (micro avg)  0.9449
         | 
| 195 | 
            +
            2022-10-09 19:24:46,251 BAD EPOCHS (no improvement): 4
         | 
| 196 | 
            +
            2022-10-09 19:24:47,160 ----------------------------------------------------------------------------------------------------
         | 
| 197 | 
            +
            2022-10-09 19:32:49,924 epoch 4 - iter 395/3952 - loss 0.16804626 - samples/sec: 27.13 - lr: 0.000005
         | 
| 198 | 
            +
            2022-10-09 19:40:52,439 epoch 4 - iter 790/3952 - loss 0.16663174 - samples/sec: 26.93 - lr: 0.000005
         | 
| 199 | 
            +
            2022-10-09 19:48:52,963 epoch 4 - iter 1185/3952 - loss 0.16647828 - samples/sec: 26.96 - lr: 0.000005
         | 
| 200 | 
            +
            2022-10-09 19:56:51,613 epoch 4 - iter 1580/3952 - loss 0.16639047 - samples/sec: 27.16 - lr: 0.000005
         | 
| 201 | 
            +
            2022-10-09 20:04:52,753 epoch 4 - iter 1975/3952 - loss 0.16657475 - samples/sec: 27.08 - lr: 0.000005
         | 
| 202 | 
            +
            2022-10-09 20:12:54,756 epoch 4 - iter 2370/3952 - loss 0.16632582 - samples/sec: 27.01 - lr: 0.000005
         | 
| 203 | 
            +
            2022-10-09 20:20:58,248 epoch 4 - iter 2765/3952 - loss 0.16578692 - samples/sec: 26.90 - lr: 0.000005
         | 
| 204 | 
            +
            2022-10-09 20:28:56,132 epoch 4 - iter 3160/3952 - loss 0.16538230 - samples/sec: 27.31 - lr: 0.000005
         | 
| 205 | 
            +
            2022-10-09 20:37:02,675 epoch 4 - iter 3555/3952 - loss 0.16519531 - samples/sec: 26.71 - lr: 0.000004
         | 
| 206 | 
            +
            2022-10-09 20:45:04,375 epoch 4 - iter 3950/3952 - loss 0.16516842 - samples/sec: 27.05 - lr: 0.000004
         | 
| 207 | 
            +
            2022-10-09 20:45:05,820 ----------------------------------------------------------------------------------------------------
         | 
| 208 | 
            +
            2022-10-09 20:45:05,821 EPOCH 4 done: loss 0.1652 - lr 0.000004
         | 
| 209 | 
            +
            2022-10-09 21:00:18,495 Evaluating as a multi-label problem: False
         | 
| 210 | 
            +
            2022-10-09 21:00:18,914 DEV : loss 0.011177059262990952 - f1-score (micro avg)  0.9535
         | 
| 211 | 
            +
            2022-10-09 21:00:51,130 BAD EPOCHS (no improvement): 4
         | 
| 212 | 
            +
            2022-10-09 21:00:52,018 ----------------------------------------------------------------------------------------------------
         | 
| 213 | 
            +
            2022-10-09 21:08:44,713 epoch 5 - iter 395/3952 - loss 0.16331808 - samples/sec: 27.41 - lr: 0.000004
         | 
| 214 | 
            +
            2022-10-09 21:16:38,390 epoch 5 - iter 790/3952 - loss 0.16221079 - samples/sec: 27.45 - lr: 0.000004
         | 
| 215 | 
            +
            2022-10-09 21:24:27,598 epoch 5 - iter 1185/3952 - loss 0.16205464 - samples/sec: 27.72 - lr: 0.000004
         | 
| 216 | 
            +
            2022-10-09 21:32:21,639 epoch 5 - iter 1580/3952 - loss 0.16189961 - samples/sec: 27.42 - lr: 0.000004
         | 
| 217 | 
            +
            2022-10-09 21:40:09,976 epoch 5 - iter 1975/3952 - loss 0.16206946 - samples/sec: 27.79 - lr: 0.000004
         | 
| 218 | 
            +
            2022-10-09 21:48:02,577 epoch 5 - iter 2370/3952 - loss 0.16196815 - samples/sec: 27.47 - lr: 0.000004
         | 
| 219 | 
            +
            2022-10-09 21:55:56,886 epoch 5 - iter 2765/3952 - loss 0.16172381 - samples/sec: 27.34 - lr: 0.000004
         | 
| 220 | 
            +
            2022-10-09 22:03:49,873 epoch 5 - iter 3160/3952 - loss 0.16156487 - samples/sec: 27.47 - lr: 0.000004
         | 
| 221 | 
            +
            2022-10-09 22:11:41,572 epoch 5 - iter 3555/3952 - loss 0.16147326 - samples/sec: 27.64 - lr: 0.000004
         | 
| 222 | 
            +
            2022-10-09 22:19:35,682 epoch 5 - iter 3950/3952 - loss 0.16130607 - samples/sec: 27.55 - lr: 0.000004
         | 
| 223 | 
            +
            2022-10-09 22:19:37,189 ----------------------------------------------------------------------------------------------------
         | 
| 224 | 
            +
            2022-10-09 22:19:37,189 EPOCH 5 done: loss 0.1613 - lr 0.000004
         | 
| 225 | 
            +
            2022-10-09 22:34:40,766 Evaluating as a multi-label problem: False
         | 
| 226 | 
            +
            2022-10-09 22:34:41,185 DEV : loss 0.011488113552331924 - f1-score (micro avg)  0.9546
         | 
| 227 | 
            +
            2022-10-09 22:35:13,419 BAD EPOCHS (no improvement): 4
         | 
| 228 | 
            +
            2022-10-09 22:35:14,308 ----------------------------------------------------------------------------------------------------
         | 
| 229 | 
            +
            2022-10-09 22:43:06,921 epoch 6 - iter 395/3952 - loss 0.16063155 - samples/sec: 27.36 - lr: 0.000004
         | 
| 230 | 
            +
            2022-10-09 22:50:59,349 epoch 6 - iter 790/3952 - loss 0.15984298 - samples/sec: 27.55 - lr: 0.000004
         | 
| 231 | 
            +
            2022-10-09 22:58:46,877 epoch 6 - iter 1185/3952 - loss 0.15946325 - samples/sec: 27.80 - lr: 0.000004
         | 
| 232 | 
            +
            2022-10-09 23:06:50,203 epoch 6 - iter 1580/3952 - loss 0.15917470 - samples/sec: 26.82 - lr: 0.000004
         | 
| 233 | 
            +
            2022-10-09 23:14:51,992 epoch 6 - iter 1975/3952 - loss 0.15882030 - samples/sec: 27.09 - lr: 0.000004
         | 
| 234 | 
            +
            2022-10-09 23:22:52,029 epoch 6 - iter 2370/3952 - loss 0.15876178 - samples/sec: 27.05 - lr: 0.000004
         | 
| 235 | 
            +
            2022-10-09 23:30:58,678 epoch 6 - iter 2765/3952 - loss 0.15864742 - samples/sec: 26.76 - lr: 0.000004
         | 
| 236 | 
            +
            2022-10-09 23:38:57,773 epoch 6 - iter 3160/3952 - loss 0.15842630 - samples/sec: 27.18 - lr: 0.000004
         | 
| 237 | 
            +
            2022-10-09 23:46:58,724 epoch 6 - iter 3555/3952 - loss 0.15814869 - samples/sec: 27.02 - lr: 0.000004
         | 
| 238 | 
            +
            2022-10-09 23:55:00,979 epoch 6 - iter 3950/3952 - loss 0.15800367 - samples/sec: 27.09 - lr: 0.000004
         | 
| 239 | 
            +
            2022-10-09 23:55:02,321 ----------------------------------------------------------------------------------------------------
         | 
| 240 | 
            +
            2022-10-09 23:55:02,321 EPOCH 6 done: loss 0.1580 - lr 0.000004
         | 
| 241 | 
            +
            2022-10-10 00:10:14,450 Evaluating as a multi-label problem: False
         | 
| 242 | 
            +
            2022-10-10 00:10:14,910 DEV : loss 0.010615515522658825 - f1-score (micro avg)  0.9587
         | 
| 243 | 
            +
            2022-10-10 00:10:46,276 BAD EPOCHS (no improvement): 4
         | 
| 244 | 
            +
            2022-10-10 00:10:47,232 ----------------------------------------------------------------------------------------------------
         | 
| 245 | 
            +
            2022-10-10 00:18:56,370 epoch 7 - iter 395/3952 - loss 0.15533572 - samples/sec: 26.74 - lr: 0.000004
         | 
| 246 | 
            +
            2022-10-10 00:26:53,533 epoch 7 - iter 790/3952 - loss 0.15567018 - samples/sec: 27.29 - lr: 0.000004
         | 
| 247 | 
            +
            2022-10-10 00:34:55,929 epoch 7 - iter 1185/3952 - loss 0.15559902 - samples/sec: 26.92 - lr: 0.000004
         | 
| 248 | 
            +
            2022-10-10 00:42:56,064 epoch 7 - iter 1580/3952 - loss 0.15526644 - samples/sec: 27.04 - lr: 0.000004
         | 
| 249 | 
            +
            2022-10-10 00:50:56,575 epoch 7 - iter 1975/3952 - loss 0.15532544 - samples/sec: 27.04 - lr: 0.000004
         | 
| 250 | 
            +
            2022-10-10 00:58:55,726 epoch 7 - iter 2370/3952 - loss 0.15538178 - samples/sec: 27.14 - lr: 0.000004
         | 
| 251 | 
            +
            2022-10-10 01:06:54,255 epoch 7 - iter 2765/3952 - loss 0.15537470 - samples/sec: 27.15 - lr: 0.000004
         | 
| 252 | 
            +
            2022-10-10 01:14:59,643 epoch 7 - iter 3160/3952 - loss 0.15531628 - samples/sec: 26.79 - lr: 0.000004
         | 
| 253 | 
            +
            2022-10-10 01:23:03,037 epoch 7 - iter 3555/3952 - loss 0.15533451 - samples/sec: 26.86 - lr: 0.000004
         | 
| 254 | 
            +
            2022-10-10 01:31:03,511 epoch 7 - iter 3950/3952 - loss 0.15511299 - samples/sec: 26.97 - lr: 0.000004
         | 
| 255 | 
            +
            2022-10-10 01:31:05,040 ----------------------------------------------------------------------------------------------------
         | 
| 256 | 
            +
            2022-10-10 01:31:05,041 EPOCH 7 done: loss 0.1551 - lr 0.000004
         | 
| 257 | 
            +
            2022-10-10 01:46:17,630 Evaluating as a multi-label problem: False
         | 
| 258 | 
            +
            2022-10-10 01:46:18,057 DEV : loss 0.010866315104067326 - f1-score (micro avg)  0.9597
         | 
| 259 | 
            +
            2022-10-10 01:46:49,834 BAD EPOCHS (no improvement): 4
         | 
| 260 | 
            +
            2022-10-10 01:46:50,741 ----------------------------------------------------------------------------------------------------
         | 
| 261 | 
            +
            2022-10-10 01:54:49,387 epoch 8 - iter 395/3952 - loss 0.15339956 - samples/sec: 27.34 - lr: 0.000004
         | 
| 262 | 
            +
            2022-10-10 02:02:54,436 epoch 8 - iter 790/3952 - loss 0.15357118 - samples/sec: 26.88 - lr: 0.000004
         | 
| 263 | 
            +
            2022-10-10 02:10:57,380 epoch 8 - iter 1185/3952 - loss 0.15383618 - samples/sec: 26.86 - lr: 0.000004
         | 
| 264 | 
            +
            2022-10-10 02:18:57,413 epoch 8 - iter 1580/3952 - loss 0.15388423 - samples/sec: 27.15 - lr: 0.000004
         | 
| 265 | 
            +
            2022-10-10 02:26:58,665 epoch 8 - iter 1975/3952 - loss 0.15366022 - samples/sec: 26.96 - lr: 0.000003
         | 
| 266 | 
            +
            2022-10-10 02:35:00,936 epoch 8 - iter 2370/3952 - loss 0.15388824 - samples/sec: 26.92 - lr: 0.000003
         | 
| 267 | 
            +
            2022-10-10 02:43:03,179 epoch 8 - iter 2765/3952 - loss 0.15380049 - samples/sec: 27.06 - lr: 0.000003
         | 
| 268 | 
            +
            2022-10-10 02:51:07,445 epoch 8 - iter 3160/3952 - loss 0.15356183 - samples/sec: 26.93 - lr: 0.000003
         | 
| 269 | 
            +
            2022-10-10 02:59:09,568 epoch 8 - iter 3555/3952 - loss 0.15337591 - samples/sec: 26.91 - lr: 0.000003
         | 
| 270 | 
            +
            2022-10-10 03:07:06,249 epoch 8 - iter 3950/3952 - loss 0.15327199 - samples/sec: 27.26 - lr: 0.000003
         | 
| 271 | 
            +
            2022-10-10 03:07:07,508 ----------------------------------------------------------------------------------------------------
         | 
| 272 | 
            +
            2022-10-10 03:07:07,509 EPOCH 8 done: loss 0.1533 - lr 0.000003
         | 
| 273 | 
            +
            2022-10-10 03:22:20,421 Evaluating as a multi-label problem: False
         | 
| 274 | 
            +
            2022-10-10 03:22:20,849 DEV : loss 0.010451321490108967 - f1-score (micro avg)  0.9617
         | 
| 275 | 
            +
            2022-10-10 03:22:53,399 BAD EPOCHS (no improvement): 4
         | 
| 276 | 
            +
            2022-10-10 03:22:55,354 ----------------------------------------------------------------------------------------------------
         | 
| 277 | 
            +
            2022-10-10 03:31:03,911 epoch 9 - iter 395/3952 - loss 0.15095455 - samples/sec: 26.52 - lr: 0.000003
         | 
| 278 | 
            +
            2022-10-10 03:39:03,919 epoch 9 - iter 790/3952 - loss 0.15100488 - samples/sec: 27.07 - lr: 0.000003
         | 
| 279 | 
            +
            2022-10-10 03:46:57,642 epoch 9 - iter 1185/3952 - loss 0.15141407 - samples/sec: 27.49 - lr: 0.000003
         | 
| 280 | 
            +
            2022-10-10 03:54:55,677 epoch 9 - iter 1580/3952 - loss 0.15153248 - samples/sec: 27.33 - lr: 0.000003
         | 
| 281 | 
            +
            2022-10-10 04:02:55,192 epoch 9 - iter 1975/3952 - loss 0.15137991 - samples/sec: 27.30 - lr: 0.000003
         | 
| 282 | 
            +
            2022-10-10 04:10:56,499 epoch 9 - iter 2370/3952 - loss 0.15134929 - samples/sec: 27.05 - lr: 0.000003
         | 
| 283 | 
            +
            2022-10-10 04:18:51,998 epoch 9 - iter 2765/3952 - loss 0.15139573 - samples/sec: 27.48 - lr: 0.000003
         | 
| 284 | 
            +
            2022-10-10 04:26:48,529 epoch 9 - iter 3160/3952 - loss 0.15141239 - samples/sec: 27.31 - lr: 0.000003
         | 
| 285 | 
            +
            2022-10-10 04:34:41,608 epoch 9 - iter 3555/3952 - loss 0.15135720 - samples/sec: 27.53 - lr: 0.000003
         | 
| 286 | 
            +
            2022-10-10 04:42:37,267 epoch 9 - iter 3950/3952 - loss 0.15132694 - samples/sec: 27.23 - lr: 0.000003
         | 
| 287 | 
            +
            2022-10-10 04:42:38,593 ----------------------------------------------------------------------------------------------------
         | 
| 288 | 
            +
            2022-10-10 04:42:38,594 EPOCH 9 done: loss 0.1513 - lr 0.000003
         | 
| 289 | 
            +
            2022-10-10 04:57:46,312 Evaluating as a multi-label problem: False
         | 
| 290 | 
            +
            2022-10-10 04:57:46,749 DEV : loss 0.01064694207161665 - f1-score (micro avg)  0.9609
         | 
| 291 | 
            +
            2022-10-10 04:58:17,984 BAD EPOCHS (no improvement): 4
         | 
| 292 | 
            +
            2022-10-10 04:58:18,878 ----------------------------------------------------------------------------------------------------
         | 
| 293 | 
            +
            2022-10-10 05:06:19,341 epoch 10 - iter 395/3952 - loss 0.14934098 - samples/sec: 27.00 - lr: 0.000003
         | 
| 294 | 
            +
            2022-10-10 05:14:13,052 epoch 10 - iter 790/3952 - loss 0.15047359 - samples/sec: 27.54 - lr: 0.000003
         | 
| 295 | 
            +
            2022-10-10 05:22:09,904 epoch 10 - iter 1185/3952 - loss 0.15005411 - samples/sec: 27.27 - lr: 0.000003
         | 
| 296 | 
            +
            2022-10-10 05:30:10,047 epoch 10 - iter 1580/3952 - loss 0.14970562 - samples/sec: 27.14 - lr: 0.000003
         | 
| 297 | 
            +
            2022-10-10 05:38:05,869 epoch 10 - iter 1975/3952 - loss 0.14954158 - samples/sec: 27.29 - lr: 0.000003
         | 
| 298 | 
            +
            2022-10-10 05:46:05,902 epoch 10 - iter 2370/3952 - loss 0.14932048 - samples/sec: 27.11 - lr: 0.000003
         | 
| 299 | 
            +
            2022-10-10 05:54:05,041 epoch 10 - iter 2765/3952 - loss 0.14927630 - samples/sec: 27.19 - lr: 0.000003
         | 
| 300 | 
            +
            2022-10-10 06:02:04,693 epoch 10 - iter 3160/3952 - loss 0.14935304 - samples/sec: 27.16 - lr: 0.000003
         | 
| 301 | 
            +
            2022-10-10 06:10:01,212 epoch 10 - iter 3555/3952 - loss 0.14941757 - samples/sec: 27.25 - lr: 0.000003
         | 
| 302 | 
            +
            2022-10-10 06:17:54,179 epoch 10 - iter 3950/3952 - loss 0.14953843 - samples/sec: 27.53 - lr: 0.000003
         | 
| 303 | 
            +
            2022-10-10 06:17:55,747 ----------------------------------------------------------------------------------------------------
         | 
| 304 | 
            +
            2022-10-10 06:17:55,747 EPOCH 10 done: loss 0.1495 - lr 0.000003
         | 
| 305 | 
            +
            2022-10-10 06:33:03,662 Evaluating as a multi-label problem: False
         | 
| 306 | 
            +
            2022-10-10 06:33:04,089 DEV : loss 0.010687584988772869 - f1-score (micro avg)  0.9623
         | 
| 307 | 
            +
            2022-10-10 06:33:35,248 BAD EPOCHS (no improvement): 4
         | 
| 308 | 
            +
            2022-10-10 06:33:36,135 ----------------------------------------------------------------------------------------------------
         | 
| 309 | 
            +
            2022-10-10 06:41:36,387 epoch 11 - iter 395/3952 - loss 0.14722548 - samples/sec: 27.24 - lr: 0.000003
         | 
| 310 | 
            +
            2022-10-10 06:49:32,701 epoch 11 - iter 790/3952 - loss 0.14792717 - samples/sec: 27.36 - lr: 0.000003
         | 
| 311 | 
            +
            2022-10-10 06:57:28,372 epoch 11 - iter 1185/3952 - loss 0.14804400 - samples/sec: 27.34 - lr: 0.000003
         | 
| 312 | 
            +
            2022-10-10 07:05:28,768 epoch 11 - iter 1580/3952 - loss 0.14822560 - samples/sec: 27.11 - lr: 0.000003
         | 
| 313 | 
            +
            2022-10-10 07:13:27,055 epoch 11 - iter 1975/3952 - loss 0.14845261 - samples/sec: 27.25 - lr: 0.000003
         | 
| 314 | 
            +
            2022-10-10 07:21:21,803 epoch 11 - iter 2370/3952 - loss 0.14860234 - samples/sec: 27.39 - lr: 0.000003
         | 
| 315 | 
            +
            2022-10-10 07:29:18,530 epoch 11 - iter 2765/3952 - loss 0.14881168 - samples/sec: 27.27 - lr: 0.000003
         | 
| 316 | 
            +
            2022-10-10 07:37:14,641 epoch 11 - iter 3160/3952 - loss 0.14859987 - samples/sec: 27.27 - lr: 0.000003
         | 
| 317 | 
            +
            2022-10-10 07:45:11,011 epoch 11 - iter 3555/3952 - loss 0.14841785 - samples/sec: 27.30 - lr: 0.000003
         | 
| 318 | 
            +
            2022-10-10 07:53:06,062 epoch 11 - iter 3950/3952 - loss 0.14839159 - samples/sec: 27.46 - lr: 0.000003
         | 
| 319 | 
            +
            2022-10-10 07:53:07,694 ----------------------------------------------------------------------------------------------------
         | 
| 320 | 
            +
            2022-10-10 07:53:07,694 EPOCH 11 done: loss 0.1484 - lr 0.000003
         | 
| 321 | 
            +
            2022-10-10 08:08:15,642 Evaluating as a multi-label problem: False
         | 
| 322 | 
            +
            2022-10-10 08:08:16,078 DEV : loss 0.010935463011264801 - f1-score (micro avg)  0.962
         | 
| 323 | 
            +
            2022-10-10 08:08:47,374 BAD EPOCHS (no improvement): 4
         | 
| 324 | 
            +
            2022-10-10 08:08:48,267 ----------------------------------------------------------------------------------------------------
         | 
| 325 | 
            +
            2022-10-10 08:16:43,768 epoch 12 - iter 395/3952 - loss 0.14779592 - samples/sec: 27.35 - lr: 0.000002
         | 
| 326 | 
            +
            2022-10-10 08:24:44,096 epoch 12 - iter 790/3952 - loss 0.14727136 - samples/sec: 27.09 - lr: 0.000002
         | 
| 327 | 
            +
            2022-10-10 08:32:40,480 epoch 12 - iter 1185/3952 - loss 0.14742119 - samples/sec: 27.30 - lr: 0.000002
         | 
| 328 | 
            +
            2022-10-10 08:40:34,998 epoch 12 - iter 1580/3952 - loss 0.14735918 - samples/sec: 27.41 - lr: 0.000002
         | 
| 329 | 
            +
            2022-10-10 08:48:29,447 epoch 12 - iter 1975/3952 - loss 0.14739904 - samples/sec: 27.37 - lr: 0.000002
         | 
| 330 | 
            +
            2022-10-10 08:56:21,930 epoch 12 - iter 2370/3952 - loss 0.14746441 - samples/sec: 27.64 - lr: 0.000002
         | 
| 331 | 
            +
            2022-10-10 09:04:20,566 epoch 12 - iter 2765/3952 - loss 0.14727131 - samples/sec: 27.27 - lr: 0.000002
         | 
| 332 | 
            +
            2022-10-10 09:12:17,286 epoch 12 - iter 3160/3952 - loss 0.14733990 - samples/sec: 27.30 - lr: 0.000002
         | 
| 333 | 
            +
            2022-10-10 09:20:14,749 epoch 12 - iter 3555/3952 - loss 0.14706041 - samples/sec: 27.28 - lr: 0.000002
         | 
| 334 | 
            +
            2022-10-10 09:28:08,079 epoch 12 - iter 3950/3952 - loss 0.14700556 - samples/sec: 27.52 - lr: 0.000002
         | 
| 335 | 
            +
            2022-10-10 09:28:09,718 ----------------------------------------------------------------------------------------------------
         | 
| 336 | 
            +
            2022-10-10 09:28:09,718 EPOCH 12 done: loss 0.1470 - lr 0.000002
         | 
| 337 | 
            +
            2022-10-10 09:43:14,910 Evaluating as a multi-label problem: False
         | 
| 338 | 
            +
            2022-10-10 09:43:15,334 DEV : loss 0.011056484654545784 - f1-score (micro avg)  0.9634
         | 
| 339 | 
            +
            2022-10-10 09:43:47,802 BAD EPOCHS (no improvement): 4
         | 
| 340 | 
            +
            2022-10-10 09:43:48,705 ----------------------------------------------------------------------------------------------------
         | 
| 341 | 
            +
            2022-10-10 09:51:46,179 epoch 13 - iter 395/3952 - loss 0.14506338 - samples/sec: 27.10 - lr: 0.000002
         | 
| 342 | 
            +
            2022-10-10 09:59:43,717 epoch 13 - iter 790/3952 - loss 0.14619048 - samples/sec: 27.23 - lr: 0.000002
         | 
| 343 | 
            +
            2022-10-10 10:07:33,958 epoch 13 - iter 1185/3952 - loss 0.14639748 - samples/sec: 27.70 - lr: 0.000002
         | 
| 344 | 
            +
            2022-10-10 10:15:01,211 epoch 13 - iter 1580/3952 - loss 0.14615405 - samples/sec: 29.14 - lr: 0.000002
         | 
| 345 | 
            +
            2022-10-10 10:22:17,577 epoch 13 - iter 1975/3952 - loss 0.14620482 - samples/sec: 29.92 - lr: 0.000002
         | 
| 346 | 
            +
            2022-10-10 10:29:43,376 epoch 13 - iter 2370/3952 - loss 0.14616699 - samples/sec: 29.29 - lr: 0.000002
         | 
| 347 | 
            +
            2022-10-10 10:37:06,729 epoch 13 - iter 2765/3952 - loss 0.14605036 - samples/sec: 29.39 - lr: 0.000002
         | 
| 348 | 
            +
            2022-10-10 10:44:37,315 epoch 13 - iter 3160/3952 - loss 0.14597794 - samples/sec: 28.97 - lr: 0.000002
         | 
| 349 | 
            +
            2022-10-10 10:52:01,383 epoch 13 - iter 3555/3952 - loss 0.14602289 - samples/sec: 29.36 - lr: 0.000002
         | 
| 350 | 
            +
            2022-10-10 10:59:26,413 epoch 13 - iter 3950/3952 - loss 0.14605007 - samples/sec: 29.23 - lr: 0.000002
         | 
| 351 | 
            +
            2022-10-10 10:59:27,781 ----------------------------------------------------------------------------------------------------
         | 
| 352 | 
            +
            2022-10-10 10:59:27,782 EPOCH 13 done: loss 0.1460 - lr 0.000002
         | 
| 353 | 
            +
            2022-10-10 11:14:26,437 Evaluating as a multi-label problem: False
         | 
| 354 | 
            +
            2022-10-10 11:14:26,846 DEV : loss 0.011409671977162361 - f1-score (micro avg)  0.9628
         | 
| 355 | 
            +
            2022-10-10 11:14:57,380 BAD EPOCHS (no improvement): 4
         | 
| 356 | 
            +
            2022-10-10 11:14:58,218 ----------------------------------------------------------------------------------------------------
         | 
| 357 | 
            +
            2022-10-10 11:22:28,388 epoch 14 - iter 395/3952 - loss 0.14532304 - samples/sec: 29.11 - lr: 0.000002
         | 
| 358 | 
            +
            2022-10-10 11:29:54,824 epoch 14 - iter 790/3952 - loss 0.14560920 - samples/sec: 29.11 - lr: 0.000002
         | 
| 359 | 
            +
            2022-10-10 11:37:22,235 epoch 14 - iter 1185/3952 - loss 0.14518057 - samples/sec: 29.05 - lr: 0.000002
         | 
| 360 | 
            +
            2022-10-10 11:44:50,891 epoch 14 - iter 1580/3952 - loss 0.14527092 - samples/sec: 28.98 - lr: 0.000002
         | 
| 361 | 
            +
            2022-10-10 11:52:18,549 epoch 14 - iter 1975/3952 - loss 0.14511930 - samples/sec: 29.20 - lr: 0.000002
         | 
| 362 | 
            +
            2022-10-10 11:59:56,465 epoch 14 - iter 2370/3952 - loss 0.14523496 - samples/sec: 28.44 - lr: 0.000002
         | 
| 363 | 
            +
            2022-10-10 12:07:18,925 epoch 14 - iter 2765/3952 - loss 0.14524068 - samples/sec: 29.46 - lr: 0.000002
         | 
| 364 | 
            +
            2022-10-10 12:14:42,038 epoch 14 - iter 3160/3952 - loss 0.14516594 - samples/sec: 29.36 - lr: 0.000002
         | 
| 365 | 
            +
            2022-10-10 12:22:07,540 epoch 14 - iter 3555/3952 - loss 0.14526955 - samples/sec: 29.18 - lr: 0.000002
         | 
| 366 | 
            +
            2022-10-10 12:29:36,124 epoch 14 - iter 3950/3952 - loss 0.14518783 - samples/sec: 29.17 - lr: 0.000002
         | 
| 367 | 
            +
            2022-10-10 12:29:37,533 ----------------------------------------------------------------------------------------------------
         | 
| 368 | 
            +
            2022-10-10 12:29:37,533 EPOCH 14 done: loss 0.1452 - lr 0.000002
         | 
| 369 | 
            +
            2022-10-10 12:44:22,577 Evaluating as a multi-label problem: False
         | 
| 370 | 
            +
            2022-10-10 12:44:22,990 DEV : loss 0.011419754475355148 - f1-score (micro avg)  0.9637
         | 
| 371 | 
            +
            2022-10-10 12:44:53,663 BAD EPOCHS (no improvement): 4
         | 
| 372 | 
            +
            2022-10-10 12:44:54,557 ----------------------------------------------------------------------------------------------------
         | 
| 373 | 
            +
2022-10-10 12:52:25,951 epoch 15 - iter 395/3952 - loss 0.14181725 - samples/sec: 28.90 - lr: 0.000002
2022-10-10 12:59:53,144 epoch 15 - iter 790/3952 - loss 0.14383060 - samples/sec: 29.20 - lr: 0.000002
2022-10-10 13:08:18,071 epoch 15 - iter 1185/3952 - loss 0.14395256 - samples/sec: 25.82 - lr: 0.000002
2022-10-10 13:16:43,174 epoch 15 - iter 1580/3952 - loss 0.14433998 - samples/sec: 25.78 - lr: 0.000002
2022-10-10 13:25:14,818 epoch 15 - iter 1975/3952 - loss 0.14428390 - samples/sec: 25.37 - lr: 0.000002
2022-10-10 13:33:46,506 epoch 15 - iter 2370/3952 - loss 0.14440542 - samples/sec: 25.41 - lr: 0.000002
2022-10-10 13:42:09,041 epoch 15 - iter 2765/3952 - loss 0.14445593 - samples/sec: 26.01 - lr: 0.000001
2022-10-10 13:50:39,620 epoch 15 - iter 3160/3952 - loss 0.14456461 - samples/sec: 25.44 - lr: 0.000001
2022-10-10 13:59:09,404 epoch 15 - iter 3555/3952 - loss 0.14444586 - samples/sec: 25.61 - lr: 0.000001
2022-10-10 14:07:41,706 epoch 15 - iter 3950/3952 - loss 0.14432217 - samples/sec: 25.45 - lr: 0.000001
2022-10-10 14:07:43,149 ----------------------------------------------------------------------------------------------------
2022-10-10 14:07:43,150 EPOCH 15 done: loss 0.1443 - lr 0.000001
2022-10-10 14:23:33,181 Evaluating as a multi-label problem: False
2022-10-10 14:23:33,654 DEV : loss 0.011627680622041225 - f1-score (micro avg)  0.9637
2022-10-10 14:24:07,996 BAD EPOCHS (no improvement): 4
2022-10-10 14:24:09,032 ----------------------------------------------------------------------------------------------------
2022-10-10 14:32:40,414 epoch 16 - iter 395/3952 - loss 0.14350737 - samples/sec: 25.61 - lr: 0.000001
2022-10-10 14:41:10,956 epoch 16 - iter 790/3952 - loss 0.14341419 - samples/sec: 25.59 - lr: 0.000001
2022-10-10 14:49:40,914 epoch 16 - iter 1185/3952 - loss 0.14370127 - samples/sec: 25.52 - lr: 0.000001
2022-10-10 14:58:09,406 epoch 16 - iter 1580/3952 - loss 0.14378459 - samples/sec: 25.57 - lr: 0.000001
2022-10-10 15:06:40,193 epoch 16 - iter 1975/3952 - loss 0.14360404 - samples/sec: 25.52 - lr: 0.000001
2022-10-10 15:15:11,603 epoch 16 - iter 2370/3952 - loss 0.14360062 - samples/sec: 25.44 - lr: 0.000001
2022-10-10 15:23:44,499 epoch 16 - iter 2765/3952 - loss 0.14356139 - samples/sec: 25.37 - lr: 0.000001
2022-10-10 15:32:14,460 epoch 16 - iter 3160/3952 - loss 0.14361871 - samples/sec: 25.48 - lr: 0.000001
2022-10-10 15:40:46,346 epoch 16 - iter 3555/3952 - loss 0.14360176 - samples/sec: 25.51 - lr: 0.000001
2022-10-10 15:49:16,072 epoch 16 - iter 3950/3952 - loss 0.14352181 - samples/sec: 25.55 - lr: 0.000001
2022-10-10 15:49:18,082 ----------------------------------------------------------------------------------------------------
2022-10-10 15:49:18,082 EPOCH 16 done: loss 0.1435 - lr 0.000001
2022-10-10 16:05:01,512 Evaluating as a multi-label problem: False
2022-10-10 16:05:01,984 DEV : loss 0.011783876456320286 - f1-score (micro avg)  0.9644
2022-10-10 16:05:36,459 BAD EPOCHS (no improvement): 4
2022-10-10 16:05:37,421 ----------------------------------------------------------------------------------------------------
2022-10-10 16:14:08,530 epoch 17 - iter 395/3952 - loss 0.14367645 - samples/sec: 25.33 - lr: 0.000001
2022-10-10 16:22:34,521 epoch 17 - iter 790/3952 - loss 0.14312751 - samples/sec: 25.71 - lr: 0.000001
2022-10-10 16:31:01,690 epoch 17 - iter 1185/3952 - loss 0.14363484 - samples/sec: 25.68 - lr: 0.000001
2022-10-10 16:39:26,318 epoch 17 - iter 1580/3952 - loss 0.14329122 - samples/sec: 25.77 - lr: 0.000001
2022-10-10 16:47:51,245 epoch 17 - iter 1975/3952 - loss 0.14338973 - samples/sec: 25.84 - lr: 0.000001
2022-10-10 16:56:18,671 epoch 17 - iter 2370/3952 - loss 0.14364105 - samples/sec: 25.62 - lr: 0.000001
2022-10-10 17:04:48,817 epoch 17 - iter 2765/3952 - loss 0.14374600 - samples/sec: 25.48 - lr: 0.000001
2022-10-10 17:13:21,802 epoch 17 - iter 3160/3952 - loss 0.14369645 - samples/sec: 25.31 - lr: 0.000001
2022-10-10 17:21:51,309 epoch 17 - iter 3555/3952 - loss 0.14360598 - samples/sec: 25.59 - lr: 0.000001
2022-10-10 17:30:20,509 epoch 17 - iter 3950/3952 - loss 0.14356029 - samples/sec: 25.54 - lr: 0.000001
2022-10-10 17:30:22,113 ----------------------------------------------------------------------------------------------------
2022-10-10 17:30:22,114 EPOCH 17 done: loss 0.1436 - lr 0.000001
2022-10-10 17:46:12,566 Evaluating as a multi-label problem: False
2022-10-10 17:46:13,046 DEV : loss 0.011797642335295677 - f1-score (micro avg)  0.9643
2022-10-10 17:46:47,683 BAD EPOCHS (no improvement): 4
2022-10-10 17:46:48,723 ----------------------------------------------------------------------------------------------------
2022-10-10 17:55:28,142 epoch 18 - iter 395/3952 - loss 0.14306617 - samples/sec: 25.20 - lr: 0.000001
2022-10-10 18:03:57,902 epoch 18 - iter 790/3952 - loss 0.14196615 - samples/sec: 25.53 - lr: 0.000001
2022-10-10 18:12:31,453 epoch 18 - iter 1185/3952 - loss 0.14182625 - samples/sec: 25.38 - lr: 0.000001
2022-10-10 18:20:57,991 epoch 18 - iter 1580/3952 - loss 0.14185926 - samples/sec: 25.62 - lr: 0.000001
2022-10-10 18:29:28,131 epoch 18 - iter 1975/3952 - loss 0.14207068 - samples/sec: 25.46 - lr: 0.000001
2022-10-10 18:37:54,888 epoch 18 - iter 2370/3952 - loss 0.14229279 - samples/sec: 25.71 - lr: 0.000001
2022-10-10 18:46:22,698 epoch 18 - iter 2765/3952 - loss 0.14234187 - samples/sec: 25.65 - lr: 0.000001
2022-10-10 18:54:50,839 epoch 18 - iter 3160/3952 - loss 0.14240556 - samples/sec: 25.65 - lr: 0.000001
2022-10-10 19:03:22,482 epoch 18 - iter 3555/3952 - loss 0.14233153 - samples/sec: 25.48 - lr: 0.000001
2022-10-10 19:11:53,854 epoch 18 - iter 3950/3952 - loss 0.14236278 - samples/sec: 25.30 - lr: 0.000001
2022-10-10 19:11:56,073 ----------------------------------------------------------------------------------------------------
2022-10-10 19:11:56,074 EPOCH 18 done: loss 0.1424 - lr 0.000001
2022-10-10 19:27:45,449 Evaluating as a multi-label problem: False
2022-10-10 19:27:45,930 DEV : loss 0.011939478106796741 - f1-score (micro avg)  0.964
2022-10-10 19:28:18,875 BAD EPOCHS (no improvement): 4
2022-10-10 19:28:19,941 ----------------------------------------------------------------------------------------------------
2022-10-10 19:36:53,864 epoch 19 - iter 395/3952 - loss 0.14362086 - samples/sec: 25.29 - lr: 0.000001
2022-10-10 19:45:24,479 epoch 19 - iter 790/3952 - loss 0.14325958 - samples/sec: 25.49 - lr: 0.000001
2022-10-10 19:53:54,808 epoch 19 - iter 1185/3952 - loss 0.14310735 - samples/sec: 25.48 - lr: 0.000000
2022-10-10 20:02:24,384 epoch 19 - iter 1580/3952 - loss 0.14293734 - samples/sec: 25.47 - lr: 0.000000
2022-10-10 20:10:51,221 epoch 19 - iter 1975/3952 - loss 0.14306481 - samples/sec: 25.77 - lr: 0.000000
2022-10-10 20:19:18,624 epoch 19 - iter 2370/3952 - loss 0.14291352 - samples/sec: 25.72 - lr: 0.000000
2022-10-10 20:27:46,259 epoch 19 - iter 2765/3952 - loss 0.14298740 - samples/sec: 25.60 - lr: 0.000000
2022-10-10 20:36:16,560 epoch 19 - iter 3160/3952 - loss 0.14288623 - samples/sec: 25.52 - lr: 0.000000
2022-10-10 20:44:47,260 epoch 19 - iter 3555/3952 - loss 0.14282900 - samples/sec: 25.45 - lr: 0.000000
2022-10-10 20:53:18,466 epoch 19 - iter 3950/3952 - loss 0.14288617 - samples/sec: 25.54 - lr: 0.000000
2022-10-10 20:53:19,964 ----------------------------------------------------------------------------------------------------
2022-10-10 20:53:19,964 EPOCH 19 done: loss 0.1429 - lr 0.000000
2022-10-10 21:09:08,715 Evaluating as a multi-label problem: False
2022-10-10 21:09:09,202 DEV : loss 0.012016847729682922 - f1-score (micro avg)  0.9643
2022-10-10 21:09:43,778 BAD EPOCHS (no improvement): 4
2022-10-10 21:09:44,810 ----------------------------------------------------------------------------------------------------
2022-10-10 21:18:11,781 epoch 20 - iter 395/3952 - loss 0.14263110 - samples/sec: 25.65 - lr: 0.000000
2022-10-10 21:26:40,891 epoch 20 - iter 790/3952 - loss 0.14225428 - samples/sec: 25.60 - lr: 0.000000
2022-10-10 21:35:08,495 epoch 20 - iter 1185/3952 - loss 0.14205051 - samples/sec: 25.66 - lr: 0.000000
2022-10-10 21:43:34,108 epoch 20 - iter 1580/3952 - loss 0.14228947 - samples/sec: 25.71 - lr: 0.000000
2022-10-10 21:52:11,211 epoch 20 - iter 1975/3952 - loss 0.14209594 - samples/sec: 25.19 - lr: 0.000000
2022-10-10 22:00:41,644 epoch 20 - iter 2370/3952 - loss 0.14227931 - samples/sec: 25.63 - lr: 0.000000
2022-10-10 22:09:10,266 epoch 20 - iter 2765/3952 - loss 0.14254834 - samples/sec: 25.65 - lr: 0.000000
2022-10-10 22:17:38,261 epoch 20 - iter 3160/3952 - loss 0.14259954 - samples/sec: 25.71 - lr: 0.000000
2022-10-10 22:26:05,321 epoch 20 - iter 3555/3952 - loss 0.14252244 - samples/sec: 25.59 - lr: 0.000000
2022-10-10 22:34:35,781 epoch 20 - iter 3950/3952 - loss 0.14238758 - samples/sec: 25.47 - lr: 0.000000
2022-10-10 22:34:37,421 ----------------------------------------------------------------------------------------------------
2022-10-10 22:34:37,422 EPOCH 20 done: loss 0.1424 - lr 0.000000
2022-10-10 22:50:27,724 Evaluating as a multi-label problem: False
2022-10-10 22:50:28,207 DEV : loss 0.012119622901082039 - f1-score (micro avg)  0.964
2022-10-10 22:51:01,203 BAD EPOCHS (no improvement): 4
2022-10-10 22:51:03,269 ----------------------------------------------------------------------------------------------------
2022-10-10 22:51:03,271 Testing using last state of model ...
2022-10-10 22:59:53,131 Evaluating as a multi-label problem: False
2022-10-10 22:59:53,392 0.945	0.9596	0.9522	0.9179
2022-10-10 22:59:53,392 
Results:
- F-score (micro) 0.9522
- F-score (macro) 0.9468
- Accuracy 0.9179

By class:
              precision    recall  f1-score   support

         LOC     0.9643    0.9671    0.9657     11823
         PER     0.9722    0.9736    0.9729      7836
   DATE_TIME     0.9152    0.9458    0.9303      4746
         ORG     0.8720    0.9196    0.8952      4565
         NRP     0.9633    0.9766    0.9699      2905

   micro avg     0.9450    0.9596    0.9522     31875
   macro avg     0.9374    0.9565    0.9468     31875
weighted avg     0.9456    0.9596    0.9525     31875

2022-10-10 22:59:53,392 ----------------------------------------------------------------------------------------------------
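As a reading aid (not part of the uploaded log): the macro and weighted averages in the test report above can be reproduced directly from the per-class rows, whereas the micro average pools true/false positives and negatives across all classes and cannot be recomputed from the table alone. A minimal sketch using only the numbers printed above:

```python
# Per-class F1 and support copied from the test report above.
per_class = {
    "LOC": (0.9657, 11823),
    "PER": (0.9729, 7836),
    "DATE_TIME": (0.9303, 4746),
    "ORG": (0.8952, 4565),
    "NRP": (0.9699, 2905),
}

total_support = sum(s for _, s in per_class.values())  # 31875 test entities
macro_f1 = sum(f1 for f1, _ in per_class.values()) / len(per_class)
weighted_f1 = sum(f1 * s for f1, s in per_class.values()) / total_support

print(f"macro F1    ~ {macro_f1:.4f}")     # ~0.9468, matching the log
print(f"weighted F1 ~ {weighted_f1:.4f}")  # ~0.9525, matching the log
# The micro F1 (0.9522) is computed by Flair from pooled TP/FP/FN counts,
# which the printed table does not expose.
```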
weights.txt
ADDED
File without changes
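For completeness, a hedged usage sketch (not taken from this repository's files): a Flair SequenceTagger trained with the setup shown in training.log is normally loaded from its exported checkpoint and applied to plain sentences. The checkpoint filename and the "ner" label type below are assumptions; point them at the actual artifacts shipped with these weights.

```python
from flair.data import Sentence
from flair.models import SequenceTagger

# Hypothetical path: Flair's ModelTrainer usually writes final-model.pt /
# best-model.pt next to training.log; adjust to the real checkpoint location.
tagger = SequenceTagger.load("final-model.pt")

# The log reports the classes LOC, PER, DATE_TIME, ORG and NRP.
sentence = Sentence("Jane Doe flew to Berlin on 10 October 2022.")
tagger.predict(sentence)

# "ner" is the assumed label type used during training.
for span in sentence.get_spans("ner"):
    print(span)
```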