philipp-zettl committed
Commit
ceffb86
1 Parent(s): ba8d6d9

Add new SentenceTransformer model.

Files changed (3)
  1. README.md +370 -77
  2. config.json +1 -1
  3. model.safetensors +1 -1
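The commit adds a fine-tuned SentenceTransformer checkpoint. A minimal usage sketch with the sentence-transformers library is shown below; the repository id is a placeholder, not the actual Hub id of this model.

```python
# Minimal usage sketch. The repo id is a placeholder for the model added in this
# commit, not its actual Hugging Face Hub id.
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("your-username/your-finetuned-minilm")  # placeholder

sentences = [
    "Der Hund spielt im Garten.",
    "A dog is playing in the garden.",
    "The stock market closed lower today.",
]
embeddings = model.encode(sentences, convert_to_tensor=True)

# Pairwise cosine similarities between all sentences.
print(util.cos_sim(embeddings, embeddings))
```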
README.md CHANGED
@@ -6,7 +6,7 @@ tags:
6
  - sentence-similarity
7
  - feature-extraction
8
  - generated_from_trainer
9
- - dataset_size:100000
10
  - loss:CoSENTLoss
11
  base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
12
  datasets: []
@@ -59,34 +59,34 @@ model-index:
59
  type: MiniLM-dev
60
  metrics:
61
  - type: pearson_cosine
62
- value: 0.8038546201162028
63
  name: Pearson Cosine
64
  - type: spearman_cosine
65
- value: 0.7952482154521802
66
  name: Spearman Cosine
67
  - type: pearson_manhattan
68
- value: 0.7802560869413135
69
  name: Pearson Manhattan
70
  - type: spearman_manhattan
71
- value: 0.7844203514438413
72
  name: Spearman Manhattan
73
  - type: pearson_euclidean
74
- value: 0.7833308118947437
75
  name: Pearson Euclidean
76
  - type: spearman_euclidean
77
- value: 0.7868069159395278
78
  name: Spearman Euclidean
79
  - type: pearson_dot
80
- value: 0.7390061538330888
81
  name: Pearson Dot
82
  - type: spearman_dot
83
- value: 0.7653163577056459
84
  name: Spearman Dot
85
  - type: pearson_max
86
- value: 0.8038546201162028
87
  name: Pearson Max
88
  - type: spearman_max
89
- value: 0.7952482154521802
90
  name: Spearman Max
91
  - task:
92
  type: semantic-similarity
@@ -96,34 +96,34 @@ model-index:
96
  type: MiniLM-test
97
  metrics:
98
  - type: pearson_cosine
99
- value: 0.8166405741503688
100
  name: Pearson Cosine
101
  - type: spearman_cosine
102
- value: 0.8039812454999457
103
  name: Spearman Cosine
104
  - type: pearson_manhattan
105
- value: 0.7971025234300892
106
  name: Pearson Manhattan
107
  - type: spearman_manhattan
108
- value: 0.7969688965888463
109
  name: Spearman Manhattan
110
  - type: pearson_euclidean
111
- value: 0.7993679672017562
112
  name: Pearson Euclidean
113
  - type: spearman_euclidean
114
- value: 0.7987110950686573
115
  name: Spearman Euclidean
116
  - type: pearson_dot
117
- value: 0.745396423140658
118
  name: Pearson Dot
119
  - type: spearman_dot
120
- value: 0.7692160449143377
121
  name: Spearman Dot
122
  - type: pearson_max
123
- value: 0.8166405741503688
124
  name: Pearson Max
125
  - type: spearman_max
126
- value: 0.8039812454999457
127
  name: Spearman Max
128
  ---
129
 
@@ -224,33 +224,33 @@ You can finetune this model on your own dataset.
224
 
225
  | Metric | Value |
226
  |:--------------------|:-----------|
227
- | pearson_cosine | 0.8039 |
228
- | **spearman_cosine** | **0.7952** |
229
- | pearson_manhattan | 0.7803 |
230
- | spearman_manhattan | 0.7844 |
231
- | pearson_euclidean | 0.7833 |
232
- | spearman_euclidean | 0.7868 |
233
- | pearson_dot | 0.739 |
234
- | spearman_dot | 0.7653 |
235
- | pearson_max | 0.8039 |
236
- | spearman_max | 0.7952 |
237
 
238
  #### Semantic Similarity
239
  * Dataset: `MiniLM-test`
240
  * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
241
 
242
- | Metric | Value |
243
- |:--------------------|:----------|
244
- | pearson_cosine | 0.8166 |
245
- | **spearman_cosine** | **0.804** |
246
- | pearson_manhattan | 0.7971 |
247
- | spearman_manhattan | 0.797 |
248
- | pearson_euclidean | 0.7994 |
249
- | spearman_euclidean | 0.7987 |
250
- | pearson_dot | 0.7454 |
251
- | spearman_dot | 0.7692 |
252
- | pearson_max | 0.8166 |
253
- | spearman_max | 0.804 |
254
 
255
  <!--
256
  ## Bias, Risks and Limitations
@@ -393,41 +393,334 @@ You can finetune this model on your own dataset.
393
  </details>
394
 
395
  ### Training Logs
396
- | Epoch | Step | Training Loss | loss | MiniLM-dev_spearman_cosine | MiniLM-test_spearman_cosine |
397
- |:-----:|:----:|:-------------:|:------:|:--------------------------:|:---------------------------:|
398
- | 0.032 | 100 | 6.7664 | - | - | - |
399
- | 0.064 | 200 | 4.3148 | - | - | - |
400
- | 0.096 | 300 | 3.0991 | - | - | - |
401
- | 0.128 | 400 | 3.0274 | - | - | - |
402
- | 0.16 | 500 | 3.6869 | - | - | - |
403
- | 0.192 | 600 | 4.9801 | - | - | - |
404
- | 0.224 | 700 | 3.5306 | - | - | - |
405
- | 0.256 | 800 | 2.8376 | - | - | - |
406
- | 0.288 | 900 | 4.0961 | - | - | - |
407
- | 0.32 | 1000 | 2.7293 | 2.4118 | 0.7245 | - |
408
- | 0.352 | 1100 | 3.657 | - | - | - |
409
- | 0.384 | 1200 | 4.0484 | - | - | - |
410
- | 0.416 | 1300 | 3.2268 | - | - | - |
411
- | 0.448 | 1400 | 2.6421 | - | - | - |
412
- | 0.48 | 1500 | 3.3672 | - | - | - |
413
- | 0.512 | 1600 | 3.285 | - | - | - |
414
- | 0.544 | 1700 | 3.6787 | - | - | - |
415
- | 0.576 | 1800 | 3.8738 | - | - | - |
416
- | 0.608 | 1900 | 2.7925 | - | - | - |
417
- | 0.64 | 2000 | 2.4805 | 1.8042 | 0.7901 | - |
418
- | 0.672 | 2100 | 3.2279 | - | - | - |
419
- | 0.704 | 2200 | 2.8016 | - | - | - |
420
- | 0.736 | 2300 | 4.1615 | - | - | - |
421
- | 0.768 | 2400 | 3.5664 | - | - | - |
422
- | 0.8 | 2500 | 2.9362 | - | - | - |
423
- | 0.832 | 2600 | 3.0684 | - | - | - |
424
- | 0.864 | 2700 | 3.168 | - | - | - |
425
- | 0.896 | 2800 | 2.707 | - | - | - |
426
- | 0.928 | 2900 | 2.2231 | - | - | - |
427
- | 0.96 | 3000 | 1.2541 | 1.6753 | 0.7952 | - |
428
- | 0.992 | 3100 | 0.5943 | - | - | - |
429
- | 1.0 | 3125 | - | - | - | 0.8040 |
430
431
 
432
  ### Framework Versions
433
  - Python: 3.10.14
 
6
  - sentence-similarity
7
  - feature-extraction
8
  - generated_from_trainer
9
+ - dataset_size:1027471
10
  - loss:CoSENTLoss
11
  base_model: sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2
12
  datasets: []
 
59
  type: MiniLM-dev
60
  metrics:
61
  - type: pearson_cosine
62
+ value: 0.8464008477003933
63
  name: Pearson Cosine
64
  - type: spearman_cosine
65
+ value: 0.8128883563290172
66
  name: Spearman Cosine
67
  - type: pearson_manhattan
68
+ value: 0.8204825552661638
69
  name: Pearson Manhattan
70
  - type: spearman_manhattan
71
+ value: 0.8069612779979122
72
  name: Spearman Manhattan
73
  - type: pearson_euclidean
74
+ value: 0.8207664286968728
75
  name: Pearson Euclidean
76
  - type: spearman_euclidean
77
+ value: 0.806851537985582
78
  name: Spearman Euclidean
79
  - type: pearson_dot
80
+ value: 0.7927608791449223
81
  name: Pearson Dot
82
  - type: spearman_dot
83
+ value: 0.8078229606916496
84
  name: Spearman Dot
85
  - type: pearson_max
86
+ value: 0.8464008477003933
87
  name: Pearson Max
88
  - type: spearman_max
89
+ value: 0.8128883563290172
90
  name: Spearman Max
91
  - task:
92
  type: semantic-similarity
 
96
  type: MiniLM-test
97
  metrics:
98
  - type: pearson_cosine
99
+ value: 0.9079517679775697
100
  name: Pearson Cosine
101
  - type: spearman_cosine
102
+ value: 0.842595786650747
103
  name: Spearman Cosine
104
  - type: pearson_manhattan
105
+ value: 0.885352838846903
106
  name: Pearson Manhattan
107
  - type: spearman_manhattan
108
+ value: 0.8389283098138718
109
  name: Spearman Manhattan
110
  - type: pearson_euclidean
111
+ value: 0.8858228063346806
112
  name: Pearson Euclidean
113
  - type: spearman_euclidean
114
+ value: 0.8390847286161828
115
  name: Spearman Euclidean
116
  - type: pearson_dot
117
+ value: 0.8618645999355777
118
  name: Pearson Dot
119
  - type: spearman_dot
120
+ value: 0.8389938584674199
121
  name: Spearman Dot
122
  - type: pearson_max
123
+ value: 0.9079517679775697
124
  name: Pearson Max
125
  - type: spearman_max
126
+ value: 0.842595786650747
127
  name: Spearman Max
128
  ---
129
 
 
224
 
225
  | Metric | Value |
226
  |:--------------------|:-----------|
227
+ | pearson_cosine | 0.8464 |
228
+ | **spearman_cosine** | **0.8129** |
229
+ | pearson_manhattan | 0.8205 |
230
+ | spearman_manhattan | 0.807 |
231
+ | pearson_euclidean | 0.8208 |
232
+ | spearman_euclidean | 0.8069 |
233
+ | pearson_dot | 0.7928 |
234
+ | spearman_dot | 0.8078 |
235
+ | pearson_max | 0.8464 |
236
+ | spearman_max | 0.8129 |
237
 
238
  #### Semantic Similarity
239
  * Dataset: `MiniLM-test`
240
  * Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)
241
 
242
+ | Metric | Value |
243
+ |:--------------------|:-----------|
244
+ | pearson_cosine | 0.908 |
245
+ | **spearman_cosine** | **0.8426** |
246
+ | pearson_manhattan | 0.8854 |
247
+ | spearman_manhattan | 0.8389 |
248
+ | pearson_euclidean | 0.8858 |
249
+ | spearman_euclidean | 0.8391 |
250
+ | pearson_dot | 0.8619 |
251
+ | spearman_dot | 0.839 |
252
+ | pearson_max | 0.908 |
253
+ | spearman_max | 0.8426 |
254
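The metrics above are produced by sentence-transformers' EmbeddingSimilarityEvaluator. A hedged sketch of how such an evaluation is typically set up follows; the sentence pairs and gold scores are illustrative stand-ins, not the actual MiniLM-dev/test data.

```python
# Illustrative evaluation sketch; pairs and gold scores are made-up stand-ins
# for the scored dev split used in the card above.
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator

model = SentenceTransformer("your-username/your-finetuned-minilm")  # placeholder

sentences1 = ["A man is eating food.", "A woman is playing violin."]
sentences2 = ["A man is eating a meal.", "Someone is cooking dinner."]
gold_scores = [0.9, 0.1]  # similarity labels in [0, 1]

evaluator = EmbeddingSimilarityEvaluator(sentences1, sentences2, gold_scores, name="MiniLM-dev")
# Depending on the library version, this returns the main similarity score
# (e.g. Spearman cosine) or a dict with all pearson_*/spearman_* metrics.
print(evaluator(model))
```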
 
255
  <!--
256
  ## Bias, Risks and Limitations
 
393
  </details>
394
 
395
  ### Training Logs
396
+ <details><summary>Click to expand</summary>
397
 
398
+ | Epoch | Step | Training Loss | loss | MiniLM-dev_spearman_cosine | MiniLM-test_spearman_cosine |
399
+ |:------:|:-----:|:-------------:|:------:|:--------------------------:|:---------------------------:|
400
+ | 0.0031 | 100 | 7.4879 | - | - | - |
401
+ | 0.0062 | 200 | 6.4531 | - | - | - |
402
+ | 0.0093 | 300 | 6.4185 | - | - | - |
403
+ | 0.0125 | 400 | 4.5043 | - | - | - |
404
+ | 0.0156 | 500 | 5.1274 | - | - | - |
405
+ | 0.0187 | 600 | 6.0006 | - | - | - |
406
+ | 0.0218 | 700 | 4.8066 | - | - | - |
407
+ | 0.0249 | 800 | 3.9536 | - | - | - |
408
+ | 0.0280 | 900 | 4.7259 | - | - | - |
409
+ | 0.0311 | 1000 | 3.7583 | 2.6440 | 0.6640 | - |
410
+ | 0.0343 | 1100 | 3.9905 | - | - | - |
411
+ | 0.0374 | 1200 | 4.8914 | - | - | - |
412
+ | 0.0405 | 1300 | 3.895 | - | - | - |
413
+ | 0.0436 | 1400 | 3.1582 | - | - | - |
414
+ | 0.0467 | 1500 | 3.7172 | - | - | - |
415
+ | 0.0498 | 1600 | 3.6785 | - | - | - |
416
+ | 0.0529 | 1700 | 3.9632 | - | - | - |
417
+ | 0.0561 | 1800 | 3.9643 | - | - | - |
418
+ | 0.0592 | 1900 | 2.829 | - | - | - |
419
+ | 0.0623 | 2000 | 2.5923 | 2.3344 | 0.7459 | - |
420
+ | 0.0654 | 2100 | 3.1617 | - | - | - |
421
+ | 0.0685 | 2200 | 2.6366 | - | - | - |
422
+ | 0.0716 | 2300 | 4.3751 | - | - | - |
423
+ | 0.0747 | 2400 | 3.4732 | - | - | - |
424
+ | 0.0779 | 2500 | 2.5695 | - | - | - |
425
+ | 0.0810 | 2600 | 2.7479 | - | - | - |
426
+ | 0.0841 | 2700 | 2.5274 | - | - | - |
427
+ | 0.0872 | 2800 | 2.4204 | - | - | - |
428
+ | 0.0903 | 2900 | 4.1305 | - | - | - |
429
+ | 0.0934 | 3000 | 4.091 | 2.0951 | 0.7426 | - |
430
+ | 0.0965 | 3100 | 3.7972 | - | - | - |
431
+ | 0.0997 | 3200 | 2.6029 | - | - | - |
432
+ | 0.1028 | 3300 | 3.2422 | - | - | - |
433
+ | 0.1059 | 3400 | 3.3747 | - | - | - |
434
+ | 0.1090 | 3500 | 3.3358 | - | - | - |
435
+ | 0.1121 | 3600 | 2.8658 | - | - | - |
436
+ | 0.1152 | 3700 | 2.6436 | - | - | - |
437
+ | 0.1183 | 3800 | 2.2006 | - | - | - |
438
+ | 0.1215 | 3900 | 2.0549 | - | - | - |
439
+ | 0.1246 | 4000 | 2.4642 | 3.4108 | 0.7236 | - |
440
+ | 0.1277 | 4100 | 2.9219 | - | - | - |
441
+ | 0.1308 | 4200 | 2.6581 | - | - | - |
442
+ | 0.1339 | 4300 | 2.2697 | - | - | - |
443
+ | 0.1370 | 4400 | 2.7215 | - | - | - |
444
+ | 0.1401 | 4500 | 2.6023 | - | - | - |
445
+ | 0.1433 | 4600 | 1.8772 | - | - | - |
446
+ | 0.1464 | 4700 | 2.6885 | - | - | - |
447
+ | 0.1495 | 4800 | 2.6005 | - | - | - |
448
+ | 0.1526 | 4900 | 1.4849 | - | - | - |
449
+ | 0.1557 | 5000 | 2.4896 | 3.4860 | 0.7117 | - |
450
+ | 0.1588 | 5100 | 2.6038 | - | - | - |
451
+ | 0.1619 | 5200 | 2.0584 | - | - | - |
452
+ | 0.1651 | 5300 | 1.9156 | - | - | - |
453
+ | 0.1682 | 5400 | 1.467 | - | - | - |
454
+ | 0.1713 | 5500 | 0.5799 | - | - | - |
455
+ | 0.1744 | 5600 | 1.617 | - | - | - |
456
+ | 0.1775 | 5700 | 1.3764 | - | - | - |
457
+ | 0.1806 | 5800 | 3.067 | - | - | - |
458
+ | 0.1837 | 5900 | 2.2463 | - | - | - |
459
+ | 0.1869 | 6000 | 1.5466 | 2.5326 | 0.7721 | - |
460
+ | 0.1900 | 6100 | 1.4097 | - | - | - |
461
+ | 0.1931 | 6200 | 1.7852 | - | - | - |
462
+ | 0.1962 | 6300 | 1.2715 | - | - | - |
463
+ | 0.1993 | 6400 | 2.5585 | - | - | - |
464
+ | 0.2024 | 6500 | 2.4665 | - | - | - |
465
+ | 0.2055 | 6600 | 1.7246 | - | - | - |
466
+ | 0.2087 | 6700 | 1.145 | - | - | - |
467
+ | 0.2118 | 6800 | 1.614 | - | - | - |
468
+ | 0.2149 | 6900 | 1.7206 | - | - | - |
469
+ | 0.2180 | 7000 | 2.6349 | 2.6824 | 0.7652 | - |
470
+ | 0.2211 | 7100 | 2.1896 | - | - | - |
471
+ | 0.2242 | 7200 | 1.9106 | - | - | - |
472
+ | 0.2274 | 7300 | 1.3783 | - | - | - |
473
+ | 0.2305 | 7400 | 0.7119 | - | - | - |
474
+ | 0.2336 | 7500 | 1.5037 | - | - | - |
475
+ | 0.2367 | 7600 | 1.8365 | - | - | - |
476
+ | 0.2398 | 7700 | 1.3817 | - | - | - |
477
+ | 0.2429 | 7800 | 1.7101 | - | - | - |
478
+ | 0.2460 | 7900 | 1.6716 | - | - | - |
479
+ | 0.2492 | 8000 | 1.3013 | 3.5864 | 0.7401 | - |
480
+ | 0.2523 | 8100 | 1.5131 | - | - | - |
481
+ | 0.2554 | 8200 | 2.3699 | - | - | - |
482
+ | 0.2585 | 8300 | 1.6179 | - | - | - |
483
+ | 0.2616 | 8400 | 1.3 | - | - | - |
484
+ | 0.2647 | 8500 | 1.5151 | - | - | - |
485
+ | 0.2678 | 8600 | 2.8703 | - | - | - |
486
+ | 0.2710 | 8700 | 2.5076 | - | - | - |
487
+ | 0.2741 | 8800 | 1.9876 | - | - | - |
488
+ | 0.2772 | 8900 | 1.5823 | - | - | - |
489
+ | 0.2803 | 9000 | 1.0845 | 2.4197 | 0.7833 | - |
490
+ | 0.2834 | 9100 | 1.2871 | - | - | - |
491
+ | 0.2865 | 9200 | 1.3901 | - | - | - |
492
+ | 0.2896 | 9300 | 1.1607 | - | - | - |
493
+ | 0.2928 | 9400 | 2.1171 | - | - | - |
494
+ | 0.2959 | 9500 | 1.4335 | - | - | - |
495
+ | 0.2990 | 9600 | 0.801 | - | - | - |
496
+ | 0.3021 | 9700 | 1.4567 | - | - | - |
497
+ | 0.3052 | 9800 | 1.7046 | - | - | - |
498
+ | 0.3083 | 9900 | 1.4378 | - | - | - |
499
+ | 0.3114 | 10000 | 2.3191 | 2.3063 | 0.7903 | - |
500
+ | 0.3146 | 10100 | 1.6518 | - | - | - |
501
+ | 0.3177 | 10200 | 0.9857 | - | - | - |
502
+ | 0.3208 | 10300 | 2.2052 | - | - | - |
503
+ | 0.3239 | 10400 | 2.0443 | - | - | - |
504
+ | 0.3270 | 10500 | 2.08 | - | - | - |
505
+ | 0.3301 | 10600 | 2.0009 | - | - | - |
506
+ | 0.3332 | 10700 | 1.3274 | - | - | - |
507
+ | 0.3364 | 10800 | 1.0298 | - | - | - |
508
+ | 0.3395 | 10900 | 1.7127 | - | - | - |
509
+ | 0.3426 | 11000 | 1.3371 | 4.0607 | 0.7211 | - |
510
+ | 0.3457 | 11100 | 2.7555 | - | - | - |
511
+ | 0.3488 | 11200 | 4.1792 | - | - | - |
512
+ | 0.3519 | 11300 | 2.0931 | - | - | - |
513
+ | 0.3550 | 11400 | 2.4591 | - | - | - |
514
+ | 0.3582 | 11500 | 3.4962 | - | - | - |
515
+ | 0.3613 | 11600 | 1.9228 | - | - | - |
516
+ | 0.3644 | 11700 | 2.7295 | - | - | - |
517
+ | 0.3675 | 11800 | 1.5425 | - | - | - |
518
+ | 0.3706 | 11900 | 1.1586 | - | - | - |
519
+ | 0.3737 | 12000 | 1.1336 | 2.2959 | 0.7890 | - |
520
+ | 0.3768 | 12100 | 1.572 | - | - | - |
521
+ | 0.3800 | 12200 | 1.2827 | - | - | - |
522
+ | 0.3831 | 12300 | 1.6352 | - | - | - |
523
+ | 0.3862 | 12400 | 1.4708 | - | - | - |
524
+ | 0.3893 | 12500 | 1.4719 | - | - | - |
525
+ | 0.3924 | 12600 | 1.4136 | - | - | - |
526
+ | 0.3955 | 12700 | 1.3969 | - | - | - |
527
+ | 0.3986 | 12800 | 1.7228 | - | - | - |
528
+ | 0.4018 | 12900 | 4.2842 | - | - | - |
529
+ | 0.4049 | 13000 | 3.5861 | 2.1113 | 0.7956 | - |
530
+ | 0.4080 | 13100 | 2.9718 | - | - | - |
531
+ | 0.4111 | 13200 | 3.1554 | - | - | - |
532
+ | 0.4142 | 13300 | 3.1357 | - | - | - |
533
+ | 0.4173 | 13400 | 2.8488 | - | - | - |
534
+ | 0.4204 | 13500 | 3.7433 | - | - | - |
535
+ | 0.4236 | 13600 | 2.4195 | - | - | - |
536
+ | 0.4267 | 13700 | 2.1384 | - | - | - |
537
+ | 0.4298 | 13800 | 2.7965 | - | - | - |
538
+ | 0.4329 | 13900 | 1.7869 | - | - | - |
539
+ | 0.4360 | 14000 | 3.0356 | 2.7234 | 0.7697 | - |
540
+ | 0.4391 | 14100 | 3.4984 | - | - | - |
541
+ | 0.4422 | 14200 | 2.4959 | - | - | - |
542
+ | 0.4454 | 14300 | 2.4615 | - | - | - |
543
+ | 0.4485 | 14400 | 2.6309 | - | - | - |
544
+ | 0.4516 | 14500 | 1.9831 | - | - | - |
545
+ | 0.4547 | 14600 | 3.25 | - | - | - |
546
+ | 0.4578 | 14700 | 3.3112 | - | - | - |
547
+ | 0.4609 | 14800 | 1.9912 | - | - | - |
548
+ | 0.4640 | 14900 | 1.9252 | - | - | - |
549
+ | 0.4672 | 15000 | 2.4545 | 2.0730 | 0.7972 | - |
550
+ | 0.4703 | 15100 | 1.6943 | - | - | - |
551
+ | 0.4734 | 15200 | 2.2851 | - | - | - |
552
+ | 0.4765 | 15300 | 2.4327 | - | - | - |
553
+ | 0.4796 | 15400 | 1.3503 | - | - | - |
554
+ | 0.4827 | 15500 | 1.1419 | - | - | - |
555
+ | 0.4858 | 15600 | 1.7906 | - | - | - |
556
+ | 0.4890 | 15700 | 1.6504 | - | - | - |
557
+ | 0.4921 | 15800 | 1.6908 | - | - | - |
558
+ | 0.4952 | 15900 | 3.0954 | - | - | - |
559
+ | 0.4983 | 16000 | 1.7151 | 2.0042 | 0.8044 | - |
560
+ | 0.5014 | 16100 | 1.5165 | - | - | - |
561
+ | 0.5045 | 16200 | 2.5573 | - | - | - |
562
+ | 0.5076 | 16300 | 1.3401 | - | - | - |
563
+ | 0.5108 | 16400 | 2.5464 | - | - | - |
564
+ | 0.5139 | 16500 | 2.4564 | - | - | - |
565
+ | 0.5170 | 16600 | 2.1667 | - | - | - |
566
+ | 0.5201 | 16700 | 1.2402 | - | - | - |
567
+ | 0.5232 | 16800 | 1.932 | - | - | - |
568
+ | 0.5263 | 16900 | 1.1811 | - | - | - |
569
+ | 0.5294 | 17000 | 2.2014 | 2.0475 | 0.8062 | - |
570
+ | 0.5326 | 17100 | 2.6535 | - | - | - |
571
+ | 0.5357 | 17200 | 1.8715 | - | - | - |
572
+ | 0.5388 | 17300 | 1.9385 | - | - | - |
573
+ | 0.5419 | 17400 | 2.0398 | - | - | - |
574
+ | 0.5450 | 17500 | 1.3436 | - | - | - |
575
+ | 0.5481 | 17600 | 2.0687 | - | - | - |
576
+ | 0.5512 | 17700 | 1.6224 | - | - | - |
577
+ | 0.5544 | 17800 | 1.0539 | - | - | - |
578
+ | 0.5575 | 17900 | 1.1162 | - | - | - |
579
+ | 0.5606 | 18000 | 1.6334 | 2.4120 | 0.7964 | - |
580
+ | 0.5637 | 18100 | 1.247 | - | - | - |
581
+ | 0.5668 | 18200 | 2.4652 | - | - | - |
582
+ | 0.5699 | 18300 | 1.8593 | - | - | - |
583
+ | 0.5730 | 18400 | 1.1875 | - | - | - |
584
+ | 0.5762 | 18500 | 2.1173 | - | - | - |
585
+ | 0.5793 | 18600 | 1.7473 | - | - | - |
586
+ | 0.5824 | 18700 | 2.1865 | - | - | - |
587
+ | 0.5855 | 18800 | 1.683 | - | - | - |
588
+ | 0.5886 | 18900 | 1.6522 | - | - | - |
589
+ | 0.5917 | 19000 | 1.0526 | 2.0743 | 0.8033 | - |
590
+ | 0.5948 | 19100 | 1.5001 | - | - | - |
591
+ | 0.5980 | 19200 | 1.2606 | - | - | - |
592
+ | 0.6011 | 19300 | 1.0597 | - | - | - |
593
+ | 0.6042 | 19400 | 1.8603 | - | - | - |
594
+ | 0.6073 | 19500 | 1.4883 | - | - | - |
595
+ | 0.6104 | 19600 | 0.6594 | - | - | - |
596
+ | 0.6135 | 19700 | 0.9557 | - | - | - |
597
+ | 0.6166 | 19800 | 0.8651 | - | - | - |
598
+ | 0.6198 | 19900 | 1.0326 | - | - | - |
599
+ | 0.6229 | 20000 | 1.2785 | 2.0868 | 0.8075 | - |
600
+ | 0.6260 | 20100 | 1.2881 | - | - | - |
601
+ | 0.6291 | 20200 | 0.5919 | - | - | - |
602
+ | 0.6322 | 20300 | 1.69 | - | - | - |
603
+ | 0.6353 | 20400 | 1.0285 | - | - | - |
604
+ | 0.6385 | 20500 | 0.8843 | - | - | - |
605
+ | 0.6416 | 20600 | 1.3756 | - | - | - |
606
+ | 0.6447 | 20700 | 0.9646 | - | - | - |
607
+ | 0.6478 | 20800 | 0.8052 | - | - | - |
608
+ | 0.6509 | 20900 | 0.8996 | - | - | - |
609
+ | 0.6540 | 21000 | 1.2207 | 2.2881 | 0.8029 | - |
610
+ | 0.6571 | 21100 | 1.3225 | - | - | - |
611
+ | 0.6603 | 21200 | 1.8101 | - | - | - |
612
+ | 0.6634 | 21300 | 0.8756 | - | - | - |
613
+ | 0.6665 | 21400 | 0.9877 | - | - | - |
614
+ | 0.6696 | 21500 | 1.7329 | - | - | - |
615
+ | 0.6727 | 21600 | 1.6885 | - | - | - |
616
+ | 0.6758 | 21700 | 1.2132 | - | - | - |
617
+ | 0.6789 | 21800 | 1.4888 | - | - | - |
618
+ | 0.6821 | 21900 | 1.403 | - | - | - |
619
+ | 0.6852 | 22000 | 0.5995 | 2.1952 | 0.8036 | - |
620
+ | 0.6883 | 22100 | 0.9658 | - | - | - |
621
+ | 0.6914 | 22200 | 1.1485 | - | - | - |
622
+ | 0.6945 | 22300 | 1.089 | - | - | - |
623
+ | 0.6976 | 22400 | 1.2719 | - | - | - |
624
+ | 0.7007 | 22500 | 0.9611 | - | - | - |
625
+ | 0.7039 | 22600 | 0.9398 | - | - | - |
626
+ | 0.7070 | 22700 | 0.7931 | - | - | - |
627
+ | 0.7101 | 22800 | 1.1224 | - | - | - |
628
+ | 0.7132 | 22900 | 2.032 | - | - | - |
629
+ | 0.7163 | 23000 | 1.3664 | 2.1043 | 0.8075 | - |
630
+ | 0.7194 | 23100 | 0.7777 | - | - | - |
631
+ | 0.7225 | 23200 | 0.9427 | - | - | - |
632
+ | 0.7257 | 23300 | 0.8846 | - | - | - |
633
+ | 0.7288 | 23400 | 1.0039 | - | - | - |
634
+ | 0.7319 | 23500 | 0.9344 | - | - | - |
635
+ | 0.7350 | 23600 | 1.3712 | - | - | - |
636
+ | 0.7381 | 23700 | 0.8039 | - | - | - |
637
+ | 0.7412 | 23800 | 1.0735 | - | - | - |
638
+ | 0.7443 | 23900 | 0.9851 | - | - | - |
639
+ | 0.7475 | 24000 | 1.8673 | 2.1547 | 0.8066 | - |
640
+ | 0.7506 | 24100 | 5.5805 | - | - | - |
641
+ | 0.7537 | 24200 | 4.1286 | - | - | - |
642
+ | 0.7568 | 24300 | 2.2206 | - | - | - |
643
+ | 0.7599 | 24400 | 3.6468 | - | - | - |
644
+ | 0.7630 | 24500 | 2.9307 | - | - | - |
645
+ | 0.7661 | 24600 | 3.8745 | - | - | - |
646
+ | 0.7693 | 24700 | 2.2125 | - | - | - |
647
+ | 0.7724 | 24800 | 2.3844 | - | - | - |
648
+ | 0.7755 | 24900 | 1.5081 | - | - | - |
649
+ | 0.7786 | 25000 | 1.5982 | 1.8491 | 0.8145 | - |
650
+ | 0.7817 | 25100 | 2.1563 | - | - | - |
651
+ | 0.7848 | 25200 | 1.8558 | - | - | - |
652
+ | 0.7879 | 25300 | 2.2087 | - | - | - |
653
+ | 0.7911 | 25400 | 2.3953 | - | - | - |
654
+ | 0.7942 | 25500 | 1.4072 | - | - | - |
655
+ | 0.7973 | 25600 | 1.4637 | - | - | - |
656
+ | 0.8004 | 25700 | 2.2037 | - | - | - |
657
+ | 0.8035 | 25800 | 1.6241 | - | - | - |
658
+ | 0.8066 | 25900 | 1.4882 | - | - | - |
659
+ | 0.8097 | 26000 | 0.9108 | 1.9292 | 0.8115 | - |
660
+ | 0.8129 | 26100 | 0.9198 | - | - | - |
661
+ | 0.8160 | 26200 | 1.2981 | - | - | - |
662
+ | 0.8191 | 26300 | 1.0513 | - | - | - |
663
+ | 0.8222 | 26400 | 1.389 | - | - | - |
664
+ | 0.8253 | 26500 | 5.8539 | - | - | - |
665
+ | 0.8284 | 26600 | 3.547 | - | - | - |
666
+ | 0.8315 | 26700 | 2.3285 | - | - | - |
667
+ | 0.8347 | 26800 | 2.8112 | - | - | - |
668
+ | 0.8378 | 26900 | 3.3717 | - | - | - |
669
+ | 0.8409 | 27000 | 2.5921 | 1.9430 | 0.8108 | - |
670
+ | 0.8440 | 27100 | 1.5048 | - | - | - |
671
+ | 0.8471 | 27200 | 1.5 | - | - | - |
672
+ | 0.8502 | 27300 | 0.778 | - | - | - |
673
+ | 0.8533 | 27400 | 0.9557 | - | - | - |
674
+ | 0.8565 | 27500 | 1.347 | - | - | - |
675
+ | 0.8596 | 27600 | 1.5882 | - | - | - |
676
+ | 0.8627 | 27700 | 1.7333 | - | - | - |
677
+ | 0.8658 | 27800 | 1.5683 | - | - | - |
678
+ | 0.8689 | 27900 | 0.7698 | - | - | - |
679
+ | 0.8720 | 28000 | 1.2758 | 1.9704 | 0.8127 | - |
680
+ | 0.8751 | 28100 | 1.3248 | - | - | - |
681
+ | 0.8783 | 28200 | 1.041 | - | - | - |
682
+ | 0.8814 | 28300 | 1.6066 | - | - | - |
683
+ | 0.8845 | 28400 | 1.9033 | - | - | - |
684
+ | 0.8876 | 28500 | 0.8781 | - | - | - |
685
+ | 0.8907 | 28600 | 0.9345 | - | - | - |
686
+ | 0.8938 | 28700 | 0.9209 | - | - | - |
687
+ | 0.8969 | 28800 | 1.1443 | - | - | - |
688
+ | 0.9001 | 28900 | 0.9522 | - | - | - |
689
+ | 0.9032 | 29000 | 1.4295 | 2.0572 | 0.8111 | - |
690
+ | 0.9063 | 29100 | 0.9005 | - | - | - |
691
+ | 0.9094 | 29200 | 1.0024 | - | - | - |
692
+ | 0.9125 | 29300 | 1.3573 | - | - | - |
693
+ | 0.9156 | 29400 | 1.0805 | - | - | - |
694
+ | 0.9187 | 29500 | 1.3308 | - | - | - |
695
+ | 0.9219 | 29600 | 1.4853 | - | - | - |
696
+ | 0.9250 | 29700 | 2.0785 | - | - | - |
697
+ | 0.9281 | 29800 | 0.9283 | - | - | - |
698
+ | 0.9312 | 29900 | 0.8081 | - | - | - |
699
+ | 0.9343 | 30000 | 0.4223 | 2.0404 | 0.8115 | - |
700
+ | 0.9374 | 30100 | 0.8565 | - | - | - |
701
+ | 0.9405 | 30200 | 0.6674 | - | - | - |
702
+ | 0.9437 | 30300 | 0.5499 | - | - | - |
703
+ | 0.9468 | 30400 | 0.3212 | - | - | - |
704
+ | 0.9499 | 30500 | 0.166 | - | - | - |
705
+ | 0.9530 | 30600 | 0.1096 | - | - | - |
706
+ | 0.9561 | 30700 | 0.0382 | - | - | - |
707
+ | 0.9592 | 30800 | 0.2927 | - | - | - |
708
+ | 0.9623 | 30900 | 0.4097 | - | - | - |
709
+ | 0.9655 | 31000 | 0.5554 | 2.0068 | 0.8130 | - |
710
+ | 0.9686 | 31100 | 0.5783 | - | - | - |
711
+ | 0.9717 | 31200 | 0.376 | - | - | - |
712
+ | 0.9748 | 31300 | 0.3469 | - | - | - |
713
+ | 0.9779 | 31400 | 0.3043 | - | - | - |
714
+ | 0.9810 | 31500 | 0.4023 | - | - | - |
715
+ | 0.9841 | 31600 | 0.1876 | - | - | - |
716
+ | 0.9873 | 31700 | 0.4473 | - | - | - |
717
+ | 0.9904 | 31800 | 0.3256 | - | - | - |
718
+ | 0.9935 | 31900 | 0.4875 | - | - | - |
719
+ | 0.9966 | 32000 | 0.1807 | 2.0122 | 0.8129 | - |
720
+ | 0.9997 | 32100 | 0.3249 | - | - | - |
721
+ | 1.0 | 32109 | - | - | - | 0.8426 |
722
+
723
+ </details>
724
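The tags list CoSENTLoss on top of paraphrase-multilingual-MiniLM-L12-v2. A rough sketch of the kind of fine-tuning run that produces training logs like the ones above is given below, using the SentenceTransformerTrainer API from sentence-transformers v3; the dataset contents, column names, and hyperparameters are assumptions, not values taken from this commit.

```python
# Rough training sketch; the toy dataset and defaults are assumptions for
# illustration, not the actual configuration behind the logs above.
from datasets import Dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
from sentence_transformers.losses import CoSENTLoss

model = SentenceTransformer("sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2")

# CoSENTLoss expects sentence pairs with a float similarity score label.
train_dataset = Dataset.from_dict({
    "sentence1": ["A man is eating food.", "A plane is taking off."],
    "sentence2": ["A man is eating a meal.", "A bird is flying."],
    "score": [0.9, 0.2],
})

loss = CoSENTLoss(model)
trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```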
 
725
  ### Framework Versions
726
  - Python: 3.10.14
config.json CHANGED
@@ -1,5 +1,5 @@
  {
- "_name_or_path": "./models/mpnet-base-all-nli-triplet/final",
+ "_name_or_path": "sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2",
  "architectures": [
  "BertModel"
  ],
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:eb2587ad4d773b2bc27b5137a6aca94c93f0e7025be13f2ac8747b9ae922e601
+ oid sha256:045f266e9f6f0aae65f8b6adc4633047fafc171ce35def4d1086599755cde290
  size 470637416