Adding ONNX file of this model (#4)
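The diff below only touches repository metadata, so as a rough sketch of what the added ONNX artifacts enable, the snippet here runs the exported encoder with onnxruntime. The file name onnx/model.onnx and the mean pooling are assumptions for illustration (only onnx/tokenizer.json is visible in this diff); adjust both to match the actual export and the pooling described in the model card.

# Hypothetical usage sketch, not part of this PR.
import numpy as np
import onnxruntime as ort
from huggingface_hub import hf_hub_download
from transformers import AutoTokenizer

repo = "ibm-granite/granite-embedding-278m-multilingual"
model_path = hf_hub_download(repo, "onnx/model.onnx")  # assumed file name
tokenizer = AutoTokenizer.from_pretrained(repo)
session = ort.InferenceSession(model_path)

batch = tokenizer(["ONNX inference for Granite embeddings"],
                  padding=True, truncation=True, return_tensors="np")
input_names = {i.name for i in session.get_inputs()}
feeds = {k: v for k, v in batch.items() if k in input_names}
last_hidden = session.run(None, feeds)[0]  # (batch, seq_len, hidden)

# Mean pooling over non-padding tokens, shown only as an illustration.
mask = batch["attention_mask"][..., None].astype(last_hidden.dtype)
embeddings = (last_hidden * mask).sum(axis=1) / mask.sum(axis=1)
print(embeddings.shape)

Depending on how the export is laid out, the same files can typically also be loaded through optimum.onnxruntime instead of calling onnxruntime directly.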
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ onnx/tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md CHANGED
@@ -21,15 +21,19 @@ tags:
  - multilingual
  - mteb
  - sentence-transformers
  model-index:
  - name: ibm-granite/granite-embedding-278m-multilingual
  results:
- - dataset:
- config: en-ext
  name: MTEB AmazonCounterfactualClassification (en-ext)
- revision: e8379541af4e31359cca9fbcf4b00f2671dba205
- split: test
  type: mteb/amazon_counterfactual
  metrics:
  - type: accuracy
  value: 73.4333
@@ -43,14 +47,14 @@ model-index:
  value: 23.347
  - type: main_score
  value: 73.4333
- task:
  type: Classification
- - dataset:
- config: en
  name: MTEB AmazonCounterfactualClassification (en)
- revision: e8379541af4e31359cca9fbcf4b00f2671dba205
- split: test
  type: mteb/amazon_counterfactual
  metrics:
  - type: accuracy
  value: 71.806
@@ -64,14 +68,14 @@ model-index:
  value: 34.045700000000004
  - type: main_score
  value: 71.806
- task:
  type: Classification
- - dataset:
- config: default
  name: MTEB AmazonPolarityClassification (default)
- revision: e2d317d38cd51312af73b3d32a06d1a08b442046
- split: test
  type: mteb/amazon_polarity
  metrics:
  - type: accuracy
  value: 67.5907
@@ -85,14 +89,14 @@ model-index:
  value: 62.0368
  - type: main_score
  value: 67.5907
- task:
  type: Classification
- - dataset:
- config: en
  name: MTEB AmazonReviewsClassification (en)
- revision: 1399c76144fd37290681b995c656ef9b2e06e26d
- split: test
  type: mteb/amazon_reviews_multi
  metrics:
  - type: accuracy
  value: 37.278
@@ -102,14 +106,14 @@ model-index:
102
  value: 36.4099
103
  - type: main_score
104
  value: 37.278
105
- task:
106
- type: Classification
107
- - dataset:
108
- config: default
109
  name: MTEB AppsRetrieval (default)
110
- revision: f22508f96b7a36c2415181ed8bb76f76e04ae2d5
111
- split: test
112
  type: CoIR-Retrieval/apps
 
 
 
113
  metrics:
114
  - type: ndcg_at_1
115
  value: 3.453
@@ -393,14 +397,14 @@ model-index:
393
  value: 30.7653
394
  - type: main_score
395
  value: 6.214
396
- task:
397
  type: Retrieval
398
- - dataset:
399
- config: default
400
  name: MTEB ArguAna (default)
401
- revision: c22ab2a51041ffd869aaddef7af8d8215647e41a
402
- split: test
403
  type: mteb/arguana
 
 
 
404
  metrics:
405
  - type: ndcg_at_1
406
  value: 31.152
@@ -684,14 +688,14 @@ model-index:
684
  value: 10.641399999999999
685
  - type: main_score
686
  value: 55.24400000000001
687
- task:
688
- type: Retrieval
689
- - dataset:
690
- config: default
691
  name: MTEB ArxivClusteringP2P (default)
692
- revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
693
- split: test
694
  type: mteb/arxiv-clustering-p2p
 
 
 
695
  metrics:
696
  - type: v_measure
697
  value: 43.1321
@@ -699,14 +703,14 @@ model-index:
699
  value: 13.594000000000001
700
  - type: main_score
701
  value: 43.1321
702
- task:
703
  type: Clustering
704
- - dataset:
705
- config: default
706
  name: MTEB ArxivClusteringS2S (default)
707
- revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
708
- split: test
709
  type: mteb/arxiv-clustering-s2s
 
 
 
710
  metrics:
711
  - type: v_measure
712
  value: 32.9343
@@ -714,14 +718,14 @@ model-index:
714
  value: 14.2478
715
  - type: main_score
716
  value: 32.9343
717
- task:
718
- type: Clustering
719
- - dataset:
720
- config: default
721
  name: MTEB AskUbuntuDupQuestions (default)
722
- revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
723
- split: test
724
  type: mteb/askubuntudupquestions-reranking
 
 
 
725
  metrics:
726
  - type: map
727
  value: 62.3443
@@ -741,14 +745,14 @@ model-index:
741
  value: 19.1211
742
  - type: main_score
743
  value: 62.3443
744
- task:
745
- type: Reranking
746
- - dataset:
747
- config: default
748
  name: MTEB BIOSSES (default)
749
- revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
750
- split: test
751
  type: mteb/biosses-sts
 
 
 
752
  metrics:
753
  - type: pearson
754
  value: 84.3253
@@ -768,14 +772,14 @@ model-index:
768
  value: 81.6362
769
  - type: main_score
770
  value: 81.6362
771
- task:
772
- type: STS
773
- - dataset:
774
- config: default
775
  name: MTEB Banking77Classification (default)
776
- revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
777
- split: test
778
  type: mteb/banking77
 
 
 
779
  metrics:
780
  - type: accuracy
781
  value: 78.0617
@@ -785,14 +789,14 @@ model-index:
785
  value: 77.2085
786
  - type: main_score
787
  value: 78.0617
788
- task:
789
- type: Classification
790
- - dataset:
791
- config: default
792
  name: MTEB BiorxivClusteringP2P (default)
793
- revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40
794
- split: test
795
  type: mteb/biorxiv-clustering-p2p
 
 
 
796
  metrics:
797
  - type: v_measure
798
  value: 35.8271
@@ -800,14 +804,14 @@ model-index:
800
  value: 0.7191000000000001
801
  - type: main_score
802
  value: 35.8271
803
- task:
804
  type: Clustering
805
- - dataset:
806
- config: default
807
  name: MTEB BiorxivClusteringS2S (default)
808
- revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
809
- split: test
810
  type: mteb/biorxiv-clustering-s2s
 
 
 
811
  metrics:
812
  - type: v_measure
813
  value: 30.3905
@@ -815,14 +819,14 @@ model-index:
815
  value: 0.7136
816
  - type: main_score
817
  value: 30.3905
818
- task:
819
- type: Clustering
820
- - dataset:
821
- config: python
822
  name: MTEB COIRCodeSearchNetRetrieval (python)
823
- revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
824
- split: test
825
  type: CoIR-Retrieval/CodeSearchNet
 
 
 
826
  metrics:
827
  - type: ndcg_at_1
828
  value: 83.22800000000001
@@ -1106,14 +1110,14 @@ model-index:
1106
  value: 89.1168
1107
  - type: main_score
1108
  value: 88.789
1109
- task:
1110
  type: Retrieval
1111
- - dataset:
1112
- config: javascript
1113
  name: MTEB COIRCodeSearchNetRetrieval (javascript)
1114
- revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1115
- split: test
1116
  type: CoIR-Retrieval/CodeSearchNet
 
 
 
1117
  metrics:
1118
  - type: ndcg_at_1
1119
  value: 29.14
@@ -1397,14 +1401,14 @@ model-index:
1397
  value: 55.3852
1398
  - type: main_score
1399
  value: 38.778
1400
- task:
1401
  type: Retrieval
1402
- - dataset:
1403
- config: go
1404
  name: MTEB COIRCodeSearchNetRetrieval (go)
1405
- revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1406
- split: test
1407
  type: CoIR-Retrieval/CodeSearchNet
 
 
 
1408
  metrics:
1409
  - type: ndcg_at_1
1410
  value: 42.809999999999995
@@ -1688,14 +1692,14 @@ model-index:
1688
  value: 56.725300000000004
1689
  - type: main_score
1690
  value: 56.296
1691
- task:
1692
  type: Retrieval
1693
- - dataset:
1694
- config: ruby
1695
  name: MTEB COIRCodeSearchNetRetrieval (ruby)
1696
- revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1697
- split: test
1698
  type: CoIR-Retrieval/CodeSearchNet
 
 
 
1699
  metrics:
1700
  - type: ndcg_at_1
1701
  value: 31.721
@@ -1979,14 +1983,14 @@ model-index:
1979
  value: 53.4268
1980
  - type: main_score
1981
  value: 42.536
1982
- task:
1983
  type: Retrieval
1984
- - dataset:
1985
- config: java
1986
  name: MTEB COIRCodeSearchNetRetrieval (java)
1987
- revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1988
- split: test
1989
  type: CoIR-Retrieval/CodeSearchNet
 
 
 
1990
  metrics:
1991
  - type: ndcg_at_1
1992
  value: 36.887
@@ -2270,14 +2274,14 @@ model-index:
2270
  value: 58.3678
2271
  - type: main_score
2272
  value: 48.54
2273
- task:
2274
  type: Retrieval
2275
- - dataset:
2276
- config: php
2277
  name: MTEB COIRCodeSearchNetRetrieval (php)
2278
- revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
2279
- split: test
2280
  type: CoIR-Retrieval/CodeSearchNet
 
 
 
2281
  metrics:
2282
  - type: ndcg_at_1
2283
  value: 30.734
@@ -2561,14 +2565,14 @@ model-index:
2561
  value: 50.2532
2562
  - type: main_score
2563
  value: 42.510999999999996
2564
- task:
2565
  type: Retrieval
2566
- - dataset:
2567
- config: default
2568
  name: MTEB CQADupstackAndroidRetrieval (default)
2569
- revision: f46a197baaae43b4f621051089b82a364682dfeb
2570
- split: test
2571
  type: mteb/cqadupstack-android
 
 
 
2572
  metrics:
2573
  - type: ndcg_at_1
2574
  value: 42.918
@@ -2852,14 +2856,14 @@ model-index:
2852
  value: 46.812799999999996
2853
  - type: main_score
2854
  value: 53.047999999999995
2855
- task:
2856
  type: Retrieval
2857
- - dataset:
2858
- config: default
2859
  name: MTEB CQADupstackEnglishRetrieval (default)
2860
- revision: ad9991cb51e31e31e430383c75ffb2885547b5f0
2861
- split: test
2862
  type: mteb/cqadupstack-english
 
 
 
2863
  metrics:
2864
  - type: ndcg_at_1
2865
  value: 35.796
@@ -3143,14 +3147,14 @@ model-index:
3143
  value: 44.8928
3144
  - type: main_score
3145
  value: 43.868
3146
- task:
3147
  type: Retrieval
3148
- - dataset:
3149
- config: default
3150
  name: MTEB CQADupstackGamingRetrieval (default)
3151
- revision: 4885aa143210c98657558c04aaf3dc47cfb54340
3152
- split: test
3153
  type: mteb/cqadupstack-gaming
 
 
 
3154
  metrics:
3155
  - type: ndcg_at_1
3156
  value: 43.448
@@ -3434,14 +3438,14 @@ model-index:
3434
  value: 52.121399999999994
3435
  - type: main_score
3436
  value: 56.369
3437
- task:
3438
  type: Retrieval
3439
- - dataset:
3440
- config: default
3441
  name: MTEB CQADupstackGisRetrieval (default)
3442
- revision: 5003b3064772da1887988e05400cf3806fe491f2
3443
- split: test
3444
  type: mteb/cqadupstack-gis
 
 
 
3445
  metrics:
3446
  - type: ndcg_at_1
3447
  value: 31.863999999999997
@@ -3725,14 +3729,14 @@ model-index:
3725
  value: 38.4264
3726
  - type: main_score
3727
  value: 43.503
3728
- task:
3729
  type: Retrieval
3730
- - dataset:
3731
- config: default
3732
  name: MTEB CQADupstackMathematicaRetrieval (default)
3733
- revision: 90fceea13679c63fe563ded68f3b6f06e50061de
3734
- split: test
3735
  type: mteb/cqadupstack-mathematica
 
 
 
3736
  metrics:
3737
  - type: ndcg_at_1
3738
  value: 22.637
@@ -4016,14 +4020,14 @@ model-index:
4016
  value: 30.9234
4017
  - type: main_score
4018
  value: 32.024
4019
- task:
4020
  type: Retrieval
4021
- - dataset:
4022
- config: default
4023
  name: MTEB CQADupstackPhysicsRetrieval (default)
4024
- revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4
4025
- split: test
4026
  type: mteb/cqadupstack-physics
 
 
 
4027
  metrics:
4028
  - type: ndcg_at_1
4029
  value: 36.477
@@ -4307,14 +4311,14 @@ model-index:
4307
  value: 50.6012
4308
  - type: main_score
4309
  value: 47.316
4310
- task:
4311
  type: Retrieval
4312
- - dataset:
4313
- config: default
4314
  name: MTEB CQADupstackProgrammersRetrieval (default)
4315
- revision: 6184bc1440d2dbc7612be22b50686b8826d22b32
4316
- split: test
4317
  type: mteb/cqadupstack-programmers
 
 
 
4318
  metrics:
4319
  - type: ndcg_at_1
4320
  value: 33.676
@@ -4598,14 +4602,14 @@ model-index:
4598
  value: 44.222
4599
  - type: main_score
4600
  value: 43.580999999999996
4601
- task:
4602
  type: Retrieval
4603
- - dataset:
4604
- config: default
4605
  name: MTEB CQADupstackRetrieval (default)
4606
- revision: 160c094312a0e1facb97e55eeddb698c0abe3571
4607
- split: test
4608
  type: CQADupstackRetrieval_is_a_combined_dataset
 
 
 
4609
  metrics:
4610
  - type: ndcg_at_1
4611
  value: 32.588499999999996
@@ -4889,27 +4893,27 @@ model-index:
4889
  value: 43.299625
4890
  - type: main_score
4891
  value: 42.74341666666667
4892
- task:
4893
  type: Retrieval
4894
- - dataset:
4895
- config: default
4896
  name: MTEB CQADupstackRetrieval (default)
4897
- revision: CQADupstackRetrieval_is_a_combined_dataset
4898
- split: test
4899
  type: CQADupstackRetrieval_is_a_combined_dataset
 
 
 
4900
  metrics:
4901
  - type: main_score
4902
  value: 42.743416666666675
4903
  - type: ndcg_at_10
4904
  value: 42.743416666666675
4905
- task:
4906
  type: Retrieval
4907
- - dataset:
4908
- config: default
4909
  name: MTEB CQADupstackStatsRetrieval (default)
4910
- revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a
4911
- split: test
4912
  type: mteb/cqadupstack-stats
 
 
 
4913
  metrics:
4914
  - type: ndcg_at_1
4915
  value: 27.607
@@ -5193,14 +5197,14 @@ model-index:
5193
  value: 50.3081
5194
  - type: main_score
5195
  value: 36.796
5196
- task:
5197
  type: Retrieval
5198
- - dataset:
5199
- config: default
5200
  name: MTEB CQADupstackTexRetrieval (default)
5201
- revision: 46989137a86843e03a6195de44b09deda022eec7
5202
- split: test
5203
  type: mteb/cqadupstack-tex
 
 
 
5204
  metrics:
5205
  - type: ndcg_at_1
5206
  value: 23.159
@@ -5484,14 +5488,14 @@ model-index:
5484
  value: 36.5259
5485
  - type: main_score
5486
  value: 31.775
5487
- task:
5488
  type: Retrieval
5489
- - dataset:
5490
- config: default
5491
  name: MTEB CQADupstackUnixRetrieval (default)
5492
- revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53
5493
- split: test
5494
  type: mteb/cqadupstack-unix
 
 
 
5495
  metrics:
5496
  - type: ndcg_at_1
5497
  value: 34.981
@@ -5775,14 +5779,14 @@ model-index:
5775
  value: 49.8286
5776
  - type: main_score
5777
  value: 45.275
5778
- task:
5779
  type: Retrieval
5780
- - dataset:
5781
- config: default
5782
  name: MTEB CQADupstackWebmastersRetrieval (default)
5783
- revision: 160c094312a0e1facb97e55eeddb698c0abe3571
5784
- split: test
5785
  type: mteb/cqadupstack-webmasters
 
 
 
5786
  metrics:
5787
  - type: ndcg_at_1
5788
  value: 32.806000000000004
@@ -6066,14 +6070,14 @@ model-index:
6066
  value: 42.3078
6067
  - type: main_score
6068
  value: 42.957
6069
- task:
6070
  type: Retrieval
6071
- - dataset:
6072
- config: default
6073
  name: MTEB CQADupstackWordpressRetrieval (default)
6074
- revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4
6075
- split: test
6076
  type: mteb/cqadupstack-wordpress
 
 
 
6077
  metrics:
6078
  - type: ndcg_at_1
6079
  value: 25.692999999999998
@@ -6357,14 +6361,14 @@ model-index:
6357
  value: 32.6251
6358
  - type: main_score
6359
  value: 36.409000000000006
6360
- task:
6361
  type: Retrieval
6362
- - dataset:
6363
- config: default
6364
  name: MTEB ClimateFEVER (default)
6365
- revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380
6366
- split: test
6367
  type: mteb/climate-fever
 
 
 
6368
  metrics:
6369
  - type: ndcg_at_1
6370
  value: 26.971
@@ -6648,14 +6652,14 @@ model-index:
6648
  value: 23.974899999999998
6649
  - type: main_score
6650
  value: 29.494
6651
- task:
6652
  type: Retrieval
6653
- - dataset:
6654
- config: default
6655
  name: MTEB CodeFeedbackMT (default)
6656
- revision: b0f12fa0c0dd67f59c95a5c33d02aeeb4c398c5f
6657
- split: test
6658
  type: CoIR-Retrieval/codefeedback-mt
 
 
 
6659
  metrics:
6660
  - type: ndcg_at_1
6661
  value: 21.044
@@ -6939,14 +6943,14 @@ model-index:
6939
  value: 41.7802
6940
  - type: main_score
6941
  value: 31.391999999999996
6942
- task:
6943
  type: Retrieval
6944
- - dataset:
6945
- config: default
6946
  name: MTEB CodeFeedbackST (default)
6947
- revision: d213819e87aab9010628da8b73ab4eb337c89340
6948
- split: test
6949
  type: CoIR-Retrieval/codefeedback-st
 
 
 
6950
  metrics:
6951
  - type: ndcg_at_1
6952
  value: 51.227000000000004
@@ -7230,14 +7234,14 @@ model-index:
7230
  value: 63.382000000000005
7231
  - type: main_score
7232
  value: 67.72200000000001
7233
- task:
7234
  type: Retrieval
7235
- - dataset:
7236
- config: python
7237
  name: MTEB CodeSearchNetCCRetrieval (python)
7238
- revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
7239
- split: test
7240
  type: CoIR-Retrieval/CodeSearchNet-ccr
 
 
 
7241
  metrics:
7242
  - type: ndcg_at_1
7243
  value: 32.417
@@ -7521,14 +7525,14 @@ model-index:
7521
  value: 50.1074
7522
  - type: main_score
7523
  value: 45.532000000000004
7524
- task:
7525
  type: Retrieval
7526
- - dataset:
7527
- config: javascript
7528
  name: MTEB CodeSearchNetCCRetrieval (javascript)
7529
- revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
7530
- split: test
7531
  type: CoIR-Retrieval/CodeSearchNet-ccr
 
 
 
7532
  metrics:
7533
  - type: ndcg_at_1
7534
  value: 33.364
@@ -7812,14 +7816,14 @@ model-index:
7812
  value: 48.0248
7813
  - type: main_score
7814
  value: 46.024
7815
- task:
7816
  type: Retrieval
7817
- - dataset:
7818
- config: go
7819
  name: MTEB CodeSearchNetCCRetrieval (go)
7820
- revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
7821
- split: test
7822
  type: CoIR-Retrieval/CodeSearchNet-ccr
 
 
 
7823
  metrics:
7824
  - type: ndcg_at_1
7825
  value: 26.471
@@ -8103,14 +8107,14 @@ model-index:
8103
  value: 41.4381
8104
  - type: main_score
8105
  value: 37.555
8106
- task:
8107
  type: Retrieval
8108
- - dataset:
8109
- config: ruby
8110
  name: MTEB CodeSearchNetCCRetrieval (ruby)
8111
- revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
8112
- split: test
8113
  type: CoIR-Retrieval/CodeSearchNet-ccr
 
 
 
8114
  metrics:
8115
  - type: ndcg_at_1
8116
  value: 36.003
@@ -8394,14 +8398,14 @@ model-index:
8394
  value: 51.7548
8395
  - type: main_score
8396
  value: 47.549
8397
- task:
8398
  type: Retrieval
8399
- - dataset:
8400
- config: java
8401
  name: MTEB CodeSearchNetCCRetrieval (java)
8402
- revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
8403
- split: test
8404
  type: CoIR-Retrieval/CodeSearchNet-ccr
 
 
 
8405
  metrics:
8406
  - type: ndcg_at_1
8407
  value: 33.355000000000004
@@ -8685,14 +8689,14 @@ model-index:
8685
  value: 48.8277
8686
  - type: main_score
8687
  value: 45.539
8688
- task:
8689
  type: Retrieval
8690
- - dataset:
8691
- config: php
8692
  name: MTEB CodeSearchNetCCRetrieval (php)
8693
- revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
8694
- split: test
8695
  type: CoIR-Retrieval/CodeSearchNet-ccr
 
 
 
8696
  metrics:
8697
  - type: ndcg_at_1
8698
  value: 25.139
@@ -8976,14 +8980,14 @@ model-index:
8976
  value: 45.191900000000004
8977
  - type: main_score
8978
  value: 35.942
8979
- task:
8980
  type: Retrieval
8981
- - dataset:
8982
- config: python
8983
  name: MTEB CodeSearchNetRetrieval (python)
8984
- revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
8985
- split: test
8986
  type: code-search-net/code_search_net
 
 
 
8987
  metrics:
8988
  - type: ndcg_at_1
8989
  value: 70.89999999999999
@@ -9267,14 +9271,14 @@ model-index:
9267
  value: 68.3376
9268
  - type: main_score
9269
  value: 83.12
9270
- task:
9271
  type: Retrieval
9272
- - dataset:
9273
- config: javascript
9274
  name: MTEB CodeSearchNetRetrieval (javascript)
9275
- revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
9276
- split: test
9277
  type: code-search-net/code_search_net
 
 
 
9278
  metrics:
9279
  - type: ndcg_at_1
9280
  value: 57.99999999999999
@@ -9558,14 +9562,14 @@ model-index:
9558
  value: 67.08579999999999
9559
  - type: main_score
9560
  value: 70.34
9561
- task:
9562
  type: Retrieval
9563
- - dataset:
9564
- config: go
9565
  name: MTEB CodeSearchNetRetrieval (go)
9566
- revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
9567
- split: test
9568
  type: code-search-net/code_search_net
 
 
 
9569
  metrics:
9570
  - type: ndcg_at_1
9571
  value: 75.6
@@ -9849,14 +9853,14 @@ model-index:
9849
  value: 74.4583
9850
  - type: main_score
9851
  value: 86.139
9852
- task:
9853
  type: Retrieval
9854
- - dataset:
9855
- config: ruby
9856
  name: MTEB CodeSearchNetRetrieval (ruby)
9857
- revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
9858
- split: test
9859
  type: code-search-net/code_search_net
 
 
 
9860
  metrics:
9861
  - type: ndcg_at_1
9862
  value: 61.3
@@ -10140,14 +10144,14 @@ model-index:
10140
  value: 69.0767
10141
  - type: main_score
10142
  value: 74.736
10143
- task:
10144
  type: Retrieval
10145
- - dataset:
10146
- config: java
10147
  name: MTEB CodeSearchNetRetrieval (java)
10148
- revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
10149
- split: test
10150
  type: code-search-net/code_search_net
 
 
 
10151
  metrics:
10152
  - type: ndcg_at_1
10153
  value: 55.1
@@ -10431,14 +10435,14 @@ model-index:
10431
  value: 60.16010000000001
10432
  - type: main_score
10433
  value: 70.89
10434
- task:
10435
  type: Retrieval
10436
- - dataset:
10437
- config: php
10438
  name: MTEB CodeSearchNetRetrieval (php)
10439
- revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
10440
- split: test
10441
  type: code-search-net/code_search_net
 
 
 
10442
  metrics:
10443
  - type: ndcg_at_1
10444
  value: 56.89999999999999
@@ -10722,14 +10726,14 @@ model-index:
10722
  value: 63.048300000000005
10723
  - type: main_score
10724
  value: 72.92999999999999
10725
- task:
10726
  type: Retrieval
10727
- - dataset:
10728
- config: default
10729
  name: MTEB CodeTransOceanContest (default)
10730
- revision: 20da4eb20a4b17300c0986ee148c90867a7f2a4d
10731
- split: test
10732
  type: CoIR-Retrieval/codetrans-contest
 
 
 
10733
  metrics:
10734
  - type: ndcg_at_1
10735
  value: 50.226000000000006
@@ -11013,14 +11017,14 @@ model-index:
11013
  value: 71.28710000000001
11014
  - type: main_score
11015
  value: 60.831
11016
- task:
11017
  type: Retrieval
11018
- - dataset:
11019
- config: default
11020
  name: MTEB CodeTransOceanDL (default)
11021
- revision: 281562cb8a1265ab5c0824bfa6ddcd9b0a15618f
11022
- split: test
11023
  type: CoIR-Retrieval/codetrans-dl
 
 
 
11024
  metrics:
11025
  - type: ndcg_at_1
11026
  value: 8.889
@@ -11304,14 +11308,14 @@ model-index:
11304
  value: 9.591
11305
  - type: main_score
11306
  value: 32.138
11307
- task:
11308
  type: Retrieval
11309
- - dataset:
11310
- config: default
11311
  name: MTEB CosQA (default)
11312
- revision: bc5efb7e9d437246ce393ed19d772e08e4a79535
11313
- split: test
11314
  type: CoIR-Retrieval/cosqa
 
 
 
11315
  metrics:
11316
  - type: ndcg_at_1
11317
  value: 14.6
@@ -11595,14 +11599,14 @@ model-index:
11595
  value: 24.0998
11596
  - type: main_score
11597
  value: 33.452
11598
- task:
11599
  type: Retrieval
11600
- - dataset:
11601
- config: default
11602
  name: MTEB DBPedia (default)
11603
- revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659
11604
- split: test
11605
  type: mteb/dbpedia
 
 
 
11606
  metrics:
11607
  - type: ndcg_at_1
11608
  value: 48.75
@@ -11886,14 +11890,14 @@ model-index:
11886
  value: 47.7958
11887
  - type: main_score
11888
  value: 34.565
11889
- task:
11890
- type: Retrieval
11891
- - dataset:
11892
- config: default
11893
  name: MTEB EmotionClassification (default)
11894
- revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37
11895
- split: test
11896
  type: mteb/emotion
 
 
 
11897
  metrics:
11898
  - type: accuracy
11899
  value: 36.449999999999996
@@ -11903,14 +11907,14 @@ model-index:
11903
  value: 38.7818
11904
  - type: main_score
11905
  value: 36.449999999999996
11906
- task:
11907
- type: Classification
11908
- - dataset:
11909
- config: default
11910
  name: MTEB FEVER (default)
11911
- revision: bea83ef9e8fb933d90a2f1d5515737465d613e12
11912
- split: test
11913
  type: mteb/fever
 
 
 
11914
  metrics:
11915
  - type: ndcg_at_1
11916
  value: 77.93299999999999
@@ -12194,14 +12198,14 @@ model-index:
12194
  value: 64.1637
12195
  - type: main_score
12196
  value: 84.932
12197
- task:
12198
  type: Retrieval
12199
- - dataset:
12200
- config: default
12201
  name: MTEB FiQA2018 (default)
12202
- revision: 27a168819829fe9bcd655c2df245fb19452e8e06
12203
- split: test
12204
  type: mteb/fiqa
 
 
 
12205
  metrics:
12206
  - type: ndcg_at_1
12207
  value: 34.259
@@ -12485,14 +12489,14 @@ model-index:
12485
  value: 46.399699999999996
12486
  - type: main_score
12487
  value: 35.663
12488
- task:
12489
  type: Retrieval
12490
- - dataset:
12491
- config: default
12492
  name: MTEB HotpotQA (default)
12493
- revision: ab518f4d6fcca38d87c25209f94beba119d02014
12494
- split: test
12495
  type: mteb/hotpotqa
 
 
 
12496
  metrics:
12497
  - type: ndcg_at_1
12498
  value: 75.908
@@ -12776,14 +12780,14 @@ model-index:
12776
  value: 69.503
12777
  - type: main_score
12778
  value: 61.513
12779
- task:
12780
- type: Retrieval
12781
- - dataset:
12782
- config: default
12783
  name: MTEB ImdbClassification (default)
12784
- revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7
12785
- split: test
12786
  type: mteb/imdb
 
 
 
12787
  metrics:
12788
  - type: accuracy
12789
  value: 63.0232
@@ -12797,14 +12801,14 @@ model-index:
12797
  value: 58.377199999999995
12798
  - type: main_score
12799
  value: 63.0232
12800
- task:
12801
- type: Classification
12802
- - dataset:
12803
- config: ar
12804
  name: MTEB MIRACLRetrieval (ar)
12805
- revision: main
12806
- split: dev
12807
  type: miracl/mmteb-miracl
 
 
 
12808
  metrics:
12809
  - type: ndcg_at_1
12810
  value: 57.459
@@ -13088,14 +13092,14 @@ model-index:
13088
  value: 41.6879
13089
  - type: main_score
13090
  value: 64.238
13091
- task:
13092
  type: Retrieval
13093
- - dataset:
13094
- config: bn
13095
  name: MTEB MIRACLRetrieval (bn)
13096
- revision: main
13097
- split: dev
13098
  type: miracl/mmteb-miracl
 
 
 
13099
  metrics:
13100
  - type: ndcg_at_1
13101
  value: 60.341
@@ -13379,14 +13383,14 @@ model-index:
13379
  value: 40.7358
13380
  - type: main_score
13381
  value: 68.05499999999999
13382
- task:
13383
  type: Retrieval
13384
- - dataset:
13385
- config: de
13386
  name: MTEB MIRACLRetrieval (de)
13387
- revision: main
13388
- split: dev
13389
  type: miracl/mmteb-miracl
 
 
 
13390
  metrics:
13391
  - type: ndcg_at_1
13392
  value: 45.574
@@ -13670,14 +13674,14 @@ model-index:
13670
  value: 42.620000000000005
13671
  - type: main_score
13672
  value: 48.123
13673
- task:
13674
  type: Retrieval
13675
- - dataset:
13676
- config: en
13677
  name: MTEB MIRACLRetrieval (en)
13678
- revision: main
13679
- split: dev
13680
  type: miracl/mmteb-miracl
 
 
 
13681
  metrics:
13682
  - type: ndcg_at_1
13683
  value: 45.556999999999995
@@ -13961,14 +13965,14 @@ model-index:
13961
  value: 30.0019
13962
  - type: main_score
13963
  value: 49.372
13964
- task:
13965
  type: Retrieval
13966
- - dataset:
13967
- config: es
13968
  name: MTEB MIRACLRetrieval (es)
13969
- revision: main
13970
- split: dev
13971
  type: miracl/mmteb-miracl
 
 
 
13972
  metrics:
13973
  - type: ndcg_at_1
13974
  value: 55.71
@@ -14252,14 +14256,14 @@ model-index:
14252
  value: 32.1927
14253
  - type: main_score
14254
  value: 49.688
14255
- task:
14256
  type: Retrieval
14257
- - dataset:
14258
- config: fa
14259
  name: MTEB MIRACLRetrieval (fa)
14260
- revision: main
14261
- split: dev
14262
  type: miracl/mmteb-miracl
 
 
 
14263
  metrics:
14264
  - type: ndcg_at_1
14265
  value: 39.873
@@ -14543,14 +14547,14 @@ model-index:
14543
  value: 20.4831
14544
  - type: main_score
14545
  value: 50.226000000000006
14546
- task:
14547
  type: Retrieval
14548
- - dataset:
14549
- config: fi
14550
  name: MTEB MIRACLRetrieval (fi)
14551
- revision: main
14552
- split: dev
14553
  type: miracl/mmteb-miracl
 
 
 
14554
  metrics:
14555
  - type: ndcg_at_1
14556
  value: 60.818000000000005
@@ -14834,14 +14838,14 @@ model-index:
14834
  value: 48.753600000000006
14835
  - type: main_score
14836
  value: 67.46
14837
- task:
14838
  type: Retrieval
14839
- - dataset:
14840
- config: fr
14841
  name: MTEB MIRACLRetrieval (fr)
14842
- revision: main
14843
- split: dev
14844
  type: miracl/mmteb-miracl
 
 
 
14845
  metrics:
14846
  - type: ndcg_at_1
14847
  value: 39.65
@@ -15125,14 +15129,14 @@ model-index:
15125
  value: 22.6855
15126
  - type: main_score
15127
  value: 49.891000000000005
15128
- task:
15129
  type: Retrieval
15130
- - dataset:
15131
- config: hi
15132
  name: MTEB MIRACLRetrieval (hi)
15133
- revision: main
15134
- split: dev
15135
  type: miracl/mmteb-miracl
 
 
 
15136
  metrics:
15137
  - type: ndcg_at_1
15138
  value: 36.857
@@ -15416,14 +15420,14 @@ model-index:
15416
  value: 35.1808
15417
  - type: main_score
15418
  value: 46.141
15419
- task:
15420
  type: Retrieval
15421
- - dataset:
15422
- config: id
15423
  name: MTEB MIRACLRetrieval (id)
15424
- revision: main
15425
- split: dev
15426
  type: miracl/mmteb-miracl
 
 
 
15427
  metrics:
15428
  - type: ndcg_at_1
15429
  value: 46.354
@@ -15707,14 +15711,14 @@ model-index:
15707
  value: 27.054000000000002
15708
  - type: main_score
15709
  value: 47.229
15710
- task:
15711
  type: Retrieval
15712
- - dataset:
15713
- config: ja
15714
  name: MTEB MIRACLRetrieval (ja)
15715
- revision: main
15716
- split: dev
15717
  type: miracl/mmteb-miracl
 
 
 
15718
  metrics:
15719
  - type: ndcg_at_1
15720
  value: 56.279
@@ -15998,14 +16002,14 @@ model-index:
15998
  value: 42.1768
15999
  - type: main_score
16000
  value: 62.81
16001
- task:
16002
  type: Retrieval
16003
- - dataset:
16004
- config: ko
16005
  name: MTEB MIRACLRetrieval (ko)
16006
- revision: main
16007
- split: dev
16008
  type: miracl/mmteb-miracl
 
 
 
16009
  metrics:
16010
  - type: ndcg_at_1
16011
  value: 52.581999999999994
@@ -16289,14 +16293,14 @@ model-index:
16289
  value: 41.166199999999996
16290
  - type: main_score
16291
  value: 59.216
16292
- task:
16293
  type: Retrieval
16294
- - dataset:
16295
- config: ru
16296
  name: MTEB MIRACLRetrieval (ru)
16297
- revision: main
16298
- split: dev
16299
  type: miracl/mmteb-miracl
 
 
 
16300
  metrics:
16301
  - type: ndcg_at_1
16302
  value: 47.524
@@ -16580,14 +16584,14 @@ model-index:
16580
  value: 33.353300000000004
16581
  - type: main_score
16582
  value: 52.349000000000004
16583
- task:
16584
  type: Retrieval
16585
- - dataset:
16586
- config: sw
16587
  name: MTEB MIRACLRetrieval (sw)
16588
- revision: main
16589
- split: dev
16590
  type: miracl/mmteb-miracl
 
 
 
16591
  metrics:
16592
  - type: ndcg_at_1
16593
  value: 51.66
@@ -16871,14 +16875,14 @@ model-index:
16871
  value: 36.696400000000004
16872
  - type: main_score
16873
  value: 61.271
16874
- task:
16875
  type: Retrieval
16876
- - dataset:
16877
- config: te
16878
  name: MTEB MIRACLRetrieval (te)
16879
- revision: main
16880
- split: dev
16881
  type: miracl/mmteb-miracl
 
 
 
16882
  metrics:
16883
  - type: ndcg_at_1
16884
  value: 63.647
@@ -17162,14 +17166,14 @@ model-index:
17162
  value: 59.1847
17163
  - type: main_score
17164
  value: 79.149
17165
- task:
17166
  type: Retrieval
17167
- - dataset:
17168
- config: th
17169
  name: MTEB MIRACLRetrieval (th)
17170
- revision: main
17171
- split: dev
17172
  type: miracl/mmteb-miracl
 
 
 
17173
  metrics:
17174
  - type: ndcg_at_1
17175
  value: 66.712
@@ -17453,14 +17457,14 @@ model-index:
17453
  value: 46.5276
17454
  - type: main_score
17455
  value: 73.324
17456
- task:
17457
  type: Retrieval
17458
- - dataset:
17459
- config: yo
17460
  name: MTEB MIRACLRetrieval (yo)
17461
- revision: main
17462
- split: dev
17463
  type: miracl/mmteb-miracl
 
 
 
17464
  metrics:
17465
  - type: ndcg_at_1
17466
  value: 49.58
@@ -17744,14 +17748,14 @@ model-index:
17744
  value: 50.3215
17745
  - type: main_score
17746
  value: 68.705
17747
- task:
17748
  type: Retrieval
17749
- - dataset:
17750
- config: zh
17751
  name: MTEB MIRACLRetrieval (zh)
17752
- revision: main
17753
- split: dev
17754
  type: miracl/mmteb-miracl
 
 
 
17755
  metrics:
17756
  - type: ndcg_at_1
17757
  value: 47.583
@@ -18035,14 +18039,14 @@ model-index:
18035
  value: 29.526799999999998
18036
  - type: main_score
18037
  value: 52.553000000000004
18038
- task:
18039
  type: Retrieval
18040
- - dataset:
18041
- config: default
18042
  name: MTEB MSMARCO (default)
18043
- revision: c5a29a104738b98a9e76336939199e264163d4a0
18044
- split: dev
18045
  type: mteb/msmarco
 
 
 
18046
  metrics:
18047
  - type: ndcg_at_1
18048
  value: 14.155000000000001
@@ -18326,14 +18330,14 @@ model-index:
18326
  value: 24.7274
18327
  - type: main_score
18328
  value: 29.866999999999997
18329
- task:
18330
- type: Retrieval
18331
- - dataset:
18332
- config: en
18333
  name: MTEB MTOPDomainClassification (en)
18334
- revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
18335
- split: test
18336
  type: mteb/mtop_domain
 
 
 
18337
  metrics:
18338
  - type: accuracy
18339
  value: 89.89970000000001
@@ -18343,14 +18347,14 @@ model-index:
18343
  value: 89.8682
18344
  - type: main_score
18345
  value: 89.89970000000001
18346
- task:
18347
  type: Classification
18348
- - dataset:
18349
- config: en
18350
  name: MTEB MTOPIntentClassification (en)
18351
- revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
18352
- split: test
18353
  type: mteb/mtop_intent
 
 
 
18354
  metrics:
18355
  - type: accuracy
18356
  value: 60.26899999999999
@@ -18360,14 +18364,14 @@ model-index:
18360
  value: 63.033899999999996
18361
  - type: main_score
18362
  value: 60.26899999999999
18363
- task:
18364
  type: Classification
18365
- - dataset:
18366
- config: en
18367
  name: MTEB MassiveIntentClassification (en)
18368
- revision: 4672e20407010da34463acc759c162ca9734bca6
18369
- split: test
18370
  type: mteb/amazon_massive_intent
 
 
 
18371
  metrics:
18372
  - type: accuracy
18373
  value: 63.9509
@@ -18377,14 +18381,14 @@ model-index:
18377
  value: 62.8
18378
  - type: main_score
18379
  value: 63.9509
18380
- task:
18381
  type: Classification
18382
- - dataset:
18383
- config: en
18384
  name: MTEB MassiveScenarioClassification (en)
18385
- revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
18386
- split: test
18387
  type: mteb/amazon_massive_scenario
 
 
 
18388
  metrics:
18389
  - type: accuracy
18390
  value: 70.928
@@ -18394,29 +18398,29 @@ model-index:
18394
  value: 70.6366
18395
  - type: main_score
18396
  value: 70.928
18397
- task:
18398
- type: Classification
18399
- - dataset:
18400
- config: default
18401
  name: MTEB MedrxivClusteringP2P (default)
18402
- revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73
18403
- split: test
18404
  type: mteb/medrxiv-clustering-p2p
18405
- metrics:
 
 
 
18406
  - type: v_measure
18407
  value: 31.522
18408
  - type: v_measure_std
18409
  value: 1.5528
18410
  - type: main_score
18411
  value: 31.522
18412
- task:
18413
  type: Clustering
18414
- - dataset:
18415
- config: default
18416
  name: MTEB MedrxivClusteringS2S (default)
18417
- revision: 35191c8c0dca72d8ff3efcd72aa802307d469663
18418
- split: test
18419
  type: mteb/medrxiv-clustering-s2s
 
 
 
18420
  metrics:
18421
  - type: v_measure
18422
  value: 28.572599999999998
@@ -18424,14 +18428,14 @@ model-index:
18424
  value: 1.8154
18425
  - type: main_score
18426
  value: 28.572599999999998
18427
- task:
18428
- type: Clustering
18429
- - dataset:
18430
- config: default
18431
  name: MTEB MindSmallReranking (default)
18432
- revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7
18433
- split: test
18434
  type: mteb/mind_small
 
 
 
18435
  metrics:
18436
  - type: map
18437
  value: 30.5381
@@ -18451,14 +18455,14 @@ model-index:
18451
  value: 13.2721
18452
  - type: main_score
18453
  value: 30.5381
18454
- task:
18455
- type: Reranking
18456
- - dataset:
18457
- config: default
18458
  name: MTEB NFCorpus (default)
18459
- revision: ec0fa4fe99da2ff19ca1214b7966684033a58814
18460
- split: test
18461
  type: mteb/nfcorpus
 
 
 
18462
  metrics:
18463
  - type: ndcg_at_1
18464
  value: 38.080000000000005
@@ -18742,14 +18746,14 @@ model-index:
18742
  value: 34.3718
18743
  - type: main_score
18744
  value: 28.903000000000002
18745
- task:
18746
  type: Retrieval
18747
- - dataset:
18748
- config: default
18749
  name: MTEB NQ (default)
18750
- revision: b774495ed302d8c44a3a7ea25c90dbce03968f31
18751
- split: test
18752
  type: mteb/nq
 
 
 
18753
  metrics:
18754
  - type: ndcg_at_1
18755
  value: 34.589
@@ -19033,14 +19037,14 @@ model-index:
19033
  value: 32.6192
19034
  - type: main_score
19035
  value: 53.410000000000004
19036
- task:
19037
  type: Retrieval
19038
- - dataset:
19039
- config: default
19040
  name: MTEB QuoraRetrieval (default)
19041
- revision: e4e08e0b7dbe3c8700f0daef558ff32256715259
19042
- split: test
19043
  type: mteb/quora
 
 
 
19044
  metrics:
19045
  - type: ndcg_at_1
19046
  value: 79.64
@@ -19324,14 +19328,14 @@ model-index:
19324
  value: 75.84349999999999
19325
  - type: main_score
19326
  value: 86.871
19327
- task:
19328
- type: Retrieval
19329
- - dataset:
19330
- config: default
19331
  name: MTEB RedditClustering (default)
19332
- revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
19333
- split: test
19334
  type: mteb/reddit-clustering
 
 
 
19335
  metrics:
19336
  - type: v_measure
19337
  value: 45.8568
@@ -19339,14 +19343,14 @@ model-index:
19339
  value: 5.685
19340
  - type: main_score
19341
  value: 45.8568
19342
- task:
19343
  type: Clustering
19344
- - dataset:
19345
- config: default
19346
  name: MTEB RedditClusteringP2P (default)
19347
- revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
19348
- split: test
19349
  type: mteb/reddit-clustering-p2p
 
 
 
19350
  metrics:
19351
  - type: v_measure
19352
  value: 54.9896
@@ -19354,14 +19358,14 @@ model-index:
19354
  value: 12.0517
19355
  - type: main_score
19356
  value: 54.9896
19357
- task:
19358
- type: Clustering
19359
- - dataset:
19360
- config: default
19361
  name: MTEB SCIDOCS (default)
19362
- revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88
19363
- split: test
19364
  type: mteb/scidocs
 
 
 
19365
  metrics:
19366
  - type: ndcg_at_1
19367
  value: 20.599999999999998
@@ -19645,14 +19649,14 @@ model-index:
19645
  value: 23.392599999999998
19646
  - type: main_score
19647
  value: 17.721
19648
- task:
19649
- type: Retrieval
19650
- - dataset:
19651
- config: default
19652
  name: MTEB SICK-R (default)
19653
- revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
19654
- split: test
19655
  type: mteb/sickr-sts
 
 
 
19656
  metrics:
19657
  - type: pearson
19658
  value: 75.5378
@@ -19672,14 +19676,14 @@ model-index:
19672
  value: 68.7448
19673
  - type: main_score
19674
  value: 68.7448
19675
- task:
19676
  type: STS
19677
- - dataset:
19678
- config: default
19679
  name: MTEB STS12 (default)
19680
- revision: a0d554a64d88156834ff5ae9920b964011b16384
19681
- split: test
19682
  type: mteb/sts12-sts
 
 
 
19683
  metrics:
19684
  - type: pearson
19685
  value: 81.6341
@@ -19699,14 +19703,14 @@ model-index:
19699
  value: 75.1934
19700
  - type: main_score
19701
  value: 75.1911
19702
- task:
19703
  type: STS
19704
- - dataset:
19705
- config: default
19706
  name: MTEB STS13 (default)
19707
- revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
19708
- split: test
19709
  type: mteb/sts13-sts
 
 
 
19710
  metrics:
19711
  - type: pearson
19712
  value: 76.4378
@@ -19726,14 +19730,14 @@ model-index:
19726
  value: 77.3053
19727
  - type: main_score
19728
  value: 77.3053
19729
- task:
19730
  type: STS
19731
- - dataset:
19732
- config: default
19733
  name: MTEB STS14 (default)
19734
- revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
19735
- split: test
19736
  type: mteb/sts14-sts
 
 
 
19737
  metrics:
19738
  - type: pearson
19739
  value: 78.4342
@@ -19753,14 +19757,14 @@ model-index:
19753
  value: 74.94800000000001
19754
  - type: main_score
19755
  value: 74.9479
19756
- task:
19757
  type: STS
19758
- - dataset:
19759
- config: default
19760
  name: MTEB STS15 (default)
19761
- revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
19762
- split: test
19763
  type: mteb/sts15-sts
 
 
 
19764
  metrics:
19765
  - type: pearson
19766
  value: 85.1908
@@ -19780,14 +19784,14 @@ model-index:
19780
  value: 86.0174
19781
  - type: main_score
19782
  value: 86.0174
19783
- task:
19784
  type: STS
19785
- - dataset:
19786
- config: default
19787
  name: MTEB STS16 (default)
19788
- revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
19789
- split: test
19790
  type: mteb/sts16-sts
 
 
 
19791
  metrics:
19792
  - type: pearson
19793
  value: 80.5421
@@ -19807,14 +19811,14 @@ model-index:
19807
  value: 81.9568
19808
  - type: main_score
19809
  value: 81.9568
19810
- task:
19811
  type: STS
19812
- - dataset:
19813
- config: en-tr
19814
  name: MTEB STS17 (en-tr)
19815
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19816
- split: test
19817
  type: mteb/sts17-crosslingual-sts
 
 
 
19818
  metrics:
19819
  - type: pearson
19820
  value: 48.2717
@@ -19834,14 +19838,14 @@ model-index:
19834
  value: 44.642900000000004
19835
  - type: main_score
19836
  value: 44.642900000000004
19837
- task:
19838
  type: STS
19839
- - dataset:
19840
- config: it-en
19841
  name: MTEB STS17 (it-en)
19842
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19843
- split: test
19844
  type: mteb/sts17-crosslingual-sts
 
 
 
19845
  metrics:
19846
  - type: pearson
19847
  value: 67.8601
@@ -19861,14 +19865,14 @@ model-index:
19861
  value: 68.2763
19862
  - type: main_score
19863
  value: 68.2763
19864
- task:
19865
  type: STS
19866
- - dataset:
19867
- config: en-en
19868
  name: MTEB STS17 (en-en)
19869
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19870
- split: test
19871
  type: mteb/sts17-crosslingual-sts
 
 
 
19872
  metrics:
19873
  - type: pearson
19874
  value: 78.05539999999999
@@ -19888,14 +19892,14 @@ model-index:
19888
  value: 78.5929
19889
  - type: main_score
19890
  value: 78.5929
19891
- task:
19892
  type: STS
19893
- - dataset:
19894
- config: en-ar
19895
  name: MTEB STS17 (en-ar)
19896
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19897
- split: test
19898
  type: mteb/sts17-crosslingual-sts
 
 
 
19899
  metrics:
19900
  - type: pearson
19901
  value: 59.4349
@@ -19915,14 +19919,14 @@ model-index:
19915
  value: 59.838800000000006
19916
  - type: main_score
19917
  value: 59.838800000000006
19918
- task:
19919
  type: STS
19920
- - dataset:
19921
- config: fr-en
19922
  name: MTEB STS17 (fr-en)
19923
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19924
- split: test
19925
  type: mteb/sts17-crosslingual-sts
 
 
 
19926
  metrics:
19927
  - type: pearson
19928
  value: 73.84039999999999
@@ -19942,14 +19946,14 @@ model-index:
19942
  value: 74.2498
19943
  - type: main_score
19944
  value: 74.2498
19945
- task:
19946
  type: STS
19947
- - dataset:
19948
- config: nl-en
19949
  name: MTEB STS17 (nl-en)
19950
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19951
- split: test
19952
  type: mteb/sts17-crosslingual-sts
 
 
 
19953
  metrics:
19954
  - type: pearson
19955
  value: 67.9218
@@ -19969,14 +19973,14 @@ model-index:
19969
  value: 68.0418
19970
  - type: main_score
19971
  value: 68.0418
19972
- task:
19973
  type: STS
19974
- - dataset:
19975
- config: es-en
19976
  name: MTEB STS17 (es-en)
19977
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19978
- split: test
19979
  type: mteb/sts17-crosslingual-sts
 
 
 
19980
  metrics:
19981
  - type: pearson
19982
  value: 70.381
@@ -19996,14 +20000,14 @@ model-index:
19996
  value: 69.5729
19997
  - type: main_score
19998
  value: 69.5729
19999
- task:
20000
  type: STS
20001
- - dataset:
20002
- config: en-de
20003
  name: MTEB STS17 (en-de)
20004
- revision: faeb762787bd10488a50c8b5be4a3b82e411949c
20005
- split: test
20006
  type: mteb/sts17-crosslingual-sts
 
 
 
20007
  metrics:
20008
  - type: pearson
20009
  value: 70.0196
@@ -20023,14 +20027,14 @@ model-index:
20023
  value: 69.7175
20024
  - type: main_score
20025
  value: 69.7175
20026
- task:
20027
  type: STS
20028
- - dataset:
20029
- config: de-en
20030
  name: MTEB STS22 (de-en)
20031
- revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20032
- split: test
20033
  type: mteb/sts22-crosslingual-sts
 
 
 
20034
  metrics:
20035
  - type: pearson
20036
  value: 65.7536
@@ -20050,14 +20054,14 @@ model-index:
20050
  value: 60.04429999999999
20051
  - type: main_score
20052
  value: 60.04429999999999
20053
- task:
20054
  type: STS
20055
- - dataset:
20056
- config: en
20057
  name: MTEB STS22 (en)
20058
- revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20059
- split: test
20060
  type: mteb/sts22-crosslingual-sts
 
 
 
20061
  metrics:
20062
  - type: pearson
20063
  value: 68.997
@@ -20077,14 +20081,14 @@ model-index:
20077
  value: 68.1508
20078
  - type: main_score
20079
  value: 68.1508
20080
- task:
20081
  type: STS
20082
- - dataset:
20083
- config: es-en
20084
  name: MTEB STS22 (es-en)
20085
- revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20086
- split: test
20087
  type: mteb/sts22-crosslingual-sts
 
 
 
20088
  metrics:
20089
  - type: pearson
20090
  value: 80.2006
@@ -20104,14 +20108,14 @@ model-index:
20104
  value: 80.4702
20105
  - type: main_score
20106
  value: 80.4702
20107
- task:
20108
  type: STS
20109
- - dataset:
20110
- config: zh-en
20111
  name: MTEB STS22 (zh-en)
20112
- revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20113
- split: test
20114
  type: mteb/sts22-crosslingual-sts
 
 
 
20115
  metrics:
20116
  - type: pearson
20117
  value: 74.0885
@@ -20131,14 +20135,14 @@ model-index:
20131
  value: 72.4574
20132
  - type: main_score
20133
  value: 72.4574
20134
- task:
20135
  type: STS
20136
- - dataset:
20137
- config: pl-en
20138
  name: MTEB STS22 (pl-en)
20139
- revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20140
- split: test
20141
  type: mteb/sts22-crosslingual-sts
 
 
 
20142
  metrics:
20143
  - type: pearson
20144
  value: 74.1794
@@ -20158,14 +20162,14 @@ model-index:
20158
  value: 70.6749
20159
  - type: main_score
20160
  value: 70.6749
20161
- task:
20162
  type: STS
20163
- - dataset:
20164
- config: default
20165
  name: MTEB STSBenchmark (default)
20166
- revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
20167
- split: test
20168
  type: mteb/stsbenchmark-sts
 
 
 
20169
  metrics:
20170
  - type: pearson
20171
  value: 76.7328
@@ -20185,14 +20189,14 @@ model-index:
20185
  value: 78.4076
20186
  - type: main_score
20187
  value: 78.4076
20188
- task:
20189
- type: STS
20190
- - dataset:
20191
- config: default
20192
  name: MTEB SciDocsRR (default)
20193
- revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
20194
- split: test
20195
  type: mteb/scidocs-reranking
 
 
 
20196
  metrics:
20197
  - type: map
20198
  value: 79.6097
@@ -20212,14 +20216,14 @@ model-index:
20212
  value: 49.763600000000004
20213
  - type: main_score
20214
  value: 79.6097
20215
- task:
20216
- type: Reranking
20217
- - dataset:
20218
- config: default
20219
  name: MTEB SciFact (default)
20220
- revision: 0228b52cf27578f30900b9e5271d331663a030d7
20221
- split: test
20222
  type: mteb/scifact
 
 
 
20223
  metrics:
20224
  - type: ndcg_at_1
20225
  value: 54.0
@@ -20503,14 +20507,14 @@ model-index:
20503
  value: 66.43050000000001
20504
  - type: main_score
20505
  value: 65.847
20506
- task:
20507
- type: Retrieval
20508
- - dataset:
20509
- config: default
20510
  name: MTEB SprintDuplicateQuestions (default)
20511
- revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
20512
- split: test
20513
  type: mteb/sprintduplicatequestions-pairclassification
 
 
 
20514
  metrics:
20515
  - type: similarity_accuracy
20516
  value: 99.7386
@@ -20594,14 +20598,14 @@ model-index:
20594
  value: 93.50840000000001
20595
  - type: main_score
20596
  value: 93.50840000000001
20597
- task:
20598
- type: PairClassification
20599
- - dataset:
20600
- config: default
20601
  name: MTEB StackExchangeClustering (default)
20602
- revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
20603
- split: test
20604
  type: mteb/stackexchange-clustering
 
 
 
20605
  metrics:
20606
  - type: v_measure
20607
  value: 55.9311
@@ -20609,14 +20613,14 @@ model-index:
20609
  value: 5.0881
20610
  - type: main_score
20611
  value: 55.9311
20612
- task:
20613
  type: Clustering
20614
- - dataset:
20615
- config: default
20616
  name: MTEB StackExchangeClusteringP2P (default)
20617
- revision: 815ca46b2622cec33ccafc3735d572c266efdb44
20618
- split: test
20619
  type: mteb/stackexchange-clustering-p2p
 
 
 
20620
  metrics:
20621
  - type: v_measure
20622
  value: 32.9298
@@ -20624,14 +20628,14 @@ model-index:
20624
  value: 1.7169
20625
  - type: main_score
20626
  value: 32.9298
20627
- task:
20628
- type: Clustering
20629
- - dataset:
20630
- config: default
20631
  name: MTEB StackOverflowDupQuestions (default)
20632
- revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
20633
- split: test
20634
  type: mteb/stackoverflowdupquestions-reranking
 
 
 
20635
  metrics:
20636
  - type: map
20637
  value: 51.7759
@@ -20651,14 +20655,14 @@ model-index:
20651
  value: 37.3753
20652
  - type: main_score
20653
  value: 51.7759
20654
- task:
20655
- type: Reranking
20656
- - dataset:
20657
- config: default
20658
  name: MTEB StackOverflowQA (default)
20659
- revision: db8f169f3894c14a00251061f957b2063eef2bd5
20660
- split: test
20661
  type: CoIR-Retrieval/stackoverflow-qa
 
 
 
20662
  metrics:
20663
  - type: ndcg_at_1
20664
  value: 68.205
@@ -20942,14 +20946,14 @@ model-index:
20942
  value: 77.837
20943
  - type: main_score
20944
  value: 78.45
20945
- task:
20946
- type: Retrieval
20947
- - dataset:
20948
- config: default
20949
  name: MTEB SummEval (default)
20950
- revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
20951
- split: test
20952
  type: mteb/summeval
 
 
 
20953
  metrics:
20954
  - type: pearson
20955
  value: 31.7097
@@ -20965,14 +20969,14 @@ model-index:
20965
  value: 31.7097
20966
  - type: main_score
20967
  value: 32.0256
20968
- task:
20969
- type: Summarization
20970
- - dataset:
20971
- config: default
20972
  name: MTEB SyntheticText2SQL (default)
20973
- revision: 686b87296c3a0191b5d9415a00526c62db9fce09
20974
- split: test
20975
  type: CoIR-Retrieval/synthetic-text2sql
 
 
 
20976
  metrics:
20977
  - type: ndcg_at_1
20978
  value: 3.5549999999999997
@@ -21256,14 +21260,14 @@ model-index:
21256
  value: -53.976
21257
  - type: main_score
21258
  value: 47.344
21259
- task:
21260
  type: Retrieval
21261
- - dataset:
21262
- config: default
21263
  name: MTEB TRECCOVID (default)
21264
- revision: bb9466bac8153a0349341eb1b22e06409e78ef4e
21265
- split: test
21266
  type: mteb/trec-covid
 
 
 
21267
  metrics:
21268
  - type: ndcg_at_1
21269
  value: 70.0
@@ -21547,14 +21551,14 @@ model-index:
21547
  value: -26.309700000000003
21548
  - type: main_score
21549
  value: 68.573
21550
- task:
21551
  type: Retrieval
21552
- - dataset:
21553
- config: default
21554
  name: MTEB Touche2020 (default)
21555
- revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f
21556
- split: test
21557
  type: mteb/touche2020
 
 
 
21558
  metrics:
21559
  - type: ndcg_at_1
21560
  value: 41.837
@@ -21838,14 +21842,14 @@ model-index:
21838
  value: -19.7412
21839
  - type: main_score
21840
  value: 27.306
21841
- task:
21842
- type: Retrieval
21843
- - dataset:
21844
- config: default
21845
  name: MTEB ToxicConversationsClassification (default)
21846
- revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
21847
- split: test
21848
  type: mteb/toxic_conversations_50k
 
 
 
21849
  metrics:
21850
  - type: accuracy
21851
  value: 62.480500000000006
@@ -21859,14 +21863,14 @@ model-index:
21859
  value: 10.9948
21860
  - type: main_score
21861
  value: 62.480500000000006
21862
- task:
21863
  type: Classification
21864
- - dataset:
21865
- config: default
21866
  name: MTEB TweetSentimentExtractionClassification (default)
21867
- revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
21868
- split: test
21869
  type: mteb/tweet_sentiment_extraction
 
 
 
21870
  metrics:
21871
  - type: accuracy
21872
  value: 58.3616
@@ -21876,14 +21880,14 @@ model-index:
21876
  value: 57.801
21877
  - type: main_score
21878
  value: 58.3616
21879
- task:
21880
- type: Classification
21881
- - dataset:
21882
- config: default
21883
  name: MTEB TwentyNewsgroupsClustering (default)
21884
- revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
21885
- split: test
21886
  type: mteb/twentynewsgroups-clustering
 
 
 
21887
  metrics:
21888
  - type: v_measure
21889
  value: 38.6199
@@ -21891,14 +21895,14 @@ model-index:
21891
  value: 2.3855999999999997
21892
  - type: main_score
21893
  value: 38.6199
21894
- task:
21895
- type: Clustering
21896
- - dataset:
21897
- config: default
21898
  name: MTEB TwitterSemEval2015 (default)
21899
- revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
21900
- split: test
21901
  type: mteb/twittersemeval2015-pairclassification
 
 
 
21902
  metrics:
21903
  - type: similarity_accuracy
21904
  value: 82.9886
@@ -21982,14 +21986,14 @@ model-index:
21982
  value: 64.4131
21983
  - type: main_score
21984
  value: 64.4131
21985
- task:
21986
  type: PairClassification
21987
- - dataset:
21988
- config: default
21989
  name: MTEB TwitterURLCorpus (default)
21990
- revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
21991
- split: test
21992
  type: mteb/twitterurlcorpus-pairclassification
 
 
 
21993
  metrics:
21994
  - type: similarity_accuracy
21995
  value: 88.95100000000001
@@ -22073,9 +22077,6 @@ model-index:
22073
  value: 86.1545
22074
  - type: main_score
22075
  value: 86.1545
22076
- task:
22077
- type: PairClassification
22078
- pipeline_tag: sentence-similarity
22079
  ---
22080
  # Granite-Embedding-278m-multilingual
22081
 
 
21
  - multilingual
22
  - mteb
23
  - sentence-transformers
24
+ - onnx
25
+ pipeline_tag: sentence-similarity
26
  model-index:
27
  - name: ibm-granite/granite-embedding-278m-multilingual
28
  results:
29
+ - task:
30
+ type: Classification
31
+ dataset:
32
  name: MTEB AmazonCounterfactualClassification (en-ext)
 
 
33
  type: mteb/amazon_counterfactual
34
+ config: en-ext
35
+ split: test
36
+ revision: e8379541af4e31359cca9fbcf4b00f2671dba205
37
  metrics:
38
  - type: accuracy
39
  value: 73.4333
 
47
  value: 23.347
48
  - type: main_score
49
  value: 73.4333
50
+ - task:
51
  type: Classification
52
+ dataset:
 
53
  name: MTEB AmazonCounterfactualClassification (en)
 
 
54
  type: mteb/amazon_counterfactual
55
+ config: en
56
+ split: test
57
+ revision: e8379541af4e31359cca9fbcf4b00f2671dba205
58
  metrics:
59
  - type: accuracy
60
  value: 71.806
 
68
  value: 34.045700000000004
69
  - type: main_score
70
  value: 71.806
71
+ - task:
72
  type: Classification
73
+ dataset:
 
74
  name: MTEB AmazonPolarityClassification (default)
 
 
75
  type: mteb/amazon_polarity
76
+ config: default
77
+ split: test
78
+ revision: e2d317d38cd51312af73b3d32a06d1a08b442046
79
  metrics:
80
  - type: accuracy
81
  value: 67.5907
 
89
  value: 62.0368
90
  - type: main_score
91
  value: 67.5907
92
+ - task:
93
  type: Classification
94
+ dataset:
 
95
  name: MTEB AmazonReviewsClassification (en)
 
 
96
  type: mteb/amazon_reviews_multi
97
+ config: en
98
+ split: test
99
+ revision: 1399c76144fd37290681b995c656ef9b2e06e26d
100
  metrics:
101
  - type: accuracy
102
  value: 37.278
 
106
  value: 36.4099
107
  - type: main_score
108
  value: 37.278
109
+ - task:
110
+ type: Retrieval
111
+ dataset:
 
112
  name: MTEB AppsRetrieval (default)
 
 
113
  type: CoIR-Retrieval/apps
114
+ config: default
115
+ split: test
116
+ revision: f22508f96b7a36c2415181ed8bb76f76e04ae2d5
117
  metrics:
118
  - type: ndcg_at_1
119
  value: 3.453
 
397
  value: 30.7653
398
  - type: main_score
399
  value: 6.214
400
+ - task:
401
  type: Retrieval
402
+ dataset:
 
403
  name: MTEB ArguAna (default)
 
 
404
  type: mteb/arguana
405
+ config: default
406
+ split: test
407
+ revision: c22ab2a51041ffd869aaddef7af8d8215647e41a
408
  metrics:
409
  - type: ndcg_at_1
410
  value: 31.152
 
688
  value: 10.641399999999999
689
  - type: main_score
690
  value: 55.24400000000001
691
+ - task:
692
+ type: Clustering
693
+ dataset:
 
694
  name: MTEB ArxivClusteringP2P (default)
 
 
695
  type: mteb/arxiv-clustering-p2p
696
+ config: default
697
+ split: test
698
+ revision: a122ad7f3f0291bf49cc6f4d32aa80929df69d5d
699
  metrics:
700
  - type: v_measure
701
  value: 43.1321
 
703
  value: 13.594000000000001
704
  - type: main_score
705
  value: 43.1321
706
+ - task:
707
  type: Clustering
708
+ dataset:
 
709
  name: MTEB ArxivClusteringS2S (default)
 
 
710
  type: mteb/arxiv-clustering-s2s
711
+ config: default
712
+ split: test
713
+ revision: f910caf1a6075f7329cdf8c1a6135696f37dbd53
714
  metrics:
715
  - type: v_measure
716
  value: 32.9343
 
718
  value: 14.2478
719
  - type: main_score
720
  value: 32.9343
721
+ - task:
722
+ type: Reranking
723
+ dataset:
 
724
  name: MTEB AskUbuntuDupQuestions (default)
 
 
725
  type: mteb/askubuntudupquestions-reranking
726
+ config: default
727
+ split: test
728
+ revision: 2000358ca161889fa9c082cb41daa8dcfb161a54
729
  metrics:
730
  - type: map
731
  value: 62.3443
 
745
  value: 19.1211
746
  - type: main_score
747
  value: 62.3443
748
+ - task:
749
+ type: STS
750
+ dataset:
 
751
  name: MTEB BIOSSES (default)
 
 
752
  type: mteb/biosses-sts
753
+ config: default
754
+ split: test
755
+ revision: d3fb88f8f02e40887cd149695127462bbcf29b4a
756
  metrics:
757
  - type: pearson
758
  value: 84.3253
 
772
  value: 81.6362
773
  - type: main_score
774
  value: 81.6362
775
+ - task:
776
+ type: Classification
777
+ dataset:
 
778
  name: MTEB Banking77Classification (default)
 
 
779
  type: mteb/banking77
780
+ config: default
781
+ split: test
782
+ revision: 0fd18e25b25c072e09e0d92ab615fda904d66300
783
  metrics:
784
  - type: accuracy
785
  value: 78.0617
 
789
  value: 77.2085
790
  - type: main_score
791
  value: 78.0617
792
+ - task:
793
+ type: Clustering
794
+ dataset:
 
795
  name: MTEB BiorxivClusteringP2P (default)
 
 
796
  type: mteb/biorxiv-clustering-p2p
797
+ config: default
798
+ split: test
799
+ revision: 65b79d1d13f80053f67aca9498d9402c2d9f1f40
800
  metrics:
801
  - type: v_measure
802
  value: 35.8271
 
804
  value: 0.7191000000000001
805
  - type: main_score
806
  value: 35.8271
807
+ - task:
808
  type: Clustering
809
+ dataset:
 
810
  name: MTEB BiorxivClusteringS2S (default)
 
 
811
  type: mteb/biorxiv-clustering-s2s
812
+ config: default
813
+ split: test
814
+ revision: 258694dd0231531bc1fd9de6ceb52a0853c6d908
815
  metrics:
816
  - type: v_measure
817
  value: 30.3905
 
819
  value: 0.7136
820
  - type: main_score
821
  value: 30.3905
822
+ - task:
823
+ type: Retrieval
824
+ dataset:
 
825
  name: MTEB COIRCodeSearchNetRetrieval (python)
 
 
826
  type: CoIR-Retrieval/CodeSearchNet
827
+ config: python
828
+ split: test
829
+ revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
830
  metrics:
831
  - type: ndcg_at_1
832
  value: 83.22800000000001
 
1110
  value: 89.1168
1111
  - type: main_score
1112
  value: 88.789
1113
+ - task:
1114
  type: Retrieval
1115
+ dataset:
 
1116
  name: MTEB COIRCodeSearchNetRetrieval (javascript)
 
 
1117
  type: CoIR-Retrieval/CodeSearchNet
1118
+ config: javascript
1119
+ split: test
1120
+ revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1121
  metrics:
1122
  - type: ndcg_at_1
1123
  value: 29.14
 
1401
  value: 55.3852
1402
  - type: main_score
1403
  value: 38.778
1404
+ - task:
1405
  type: Retrieval
1406
+ dataset:
 
1407
  name: MTEB COIRCodeSearchNetRetrieval (go)
 
 
1408
  type: CoIR-Retrieval/CodeSearchNet
1409
+ config: go
1410
+ split: test
1411
+ revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1412
  metrics:
1413
  - type: ndcg_at_1
1414
  value: 42.809999999999995
 
1692
  value: 56.725300000000004
1693
  - type: main_score
1694
  value: 56.296
1695
+ - task:
1696
  type: Retrieval
1697
+ dataset:
 
1698
  name: MTEB COIRCodeSearchNetRetrieval (ruby)
 
 
1699
  type: CoIR-Retrieval/CodeSearchNet
1700
+ config: ruby
1701
+ split: test
1702
+ revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1703
  metrics:
1704
  - type: ndcg_at_1
1705
  value: 31.721
 
1983
  value: 53.4268
1984
  - type: main_score
1985
  value: 42.536
1986
+ - task:
1987
  type: Retrieval
1988
+ dataset:
 
1989
  name: MTEB COIRCodeSearchNetRetrieval (java)
 
 
1990
  type: CoIR-Retrieval/CodeSearchNet
1991
+ config: java
1992
+ split: test
1993
+ revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
1994
  metrics:
1995
  - type: ndcg_at_1
1996
  value: 36.887
 
2274
  value: 58.3678
2275
  - type: main_score
2276
  value: 48.54
2277
+ - task:
2278
  type: Retrieval
2279
+ dataset:
 
2280
  name: MTEB COIRCodeSearchNetRetrieval (php)
 
 
2281
  type: CoIR-Retrieval/CodeSearchNet
2282
+ config: php
2283
+ split: test
2284
+ revision: 4adc7bc41202b5c13543c9c886a25f340634dab3
2285
  metrics:
2286
  - type: ndcg_at_1
2287
  value: 30.734
 
2565
  value: 50.2532
2566
  - type: main_score
2567
  value: 42.510999999999996
2568
+ - task:
2569
  type: Retrieval
2570
+ dataset:
 
2571
  name: MTEB CQADupstackAndroidRetrieval (default)
 
 
2572
  type: mteb/cqadupstack-android
2573
+ config: default
2574
+ split: test
2575
+ revision: f46a197baaae43b4f621051089b82a364682dfeb
2576
  metrics:
2577
  - type: ndcg_at_1
2578
  value: 42.918
 
2856
  value: 46.812799999999996
2857
  - type: main_score
2858
  value: 53.047999999999995
2859
+ - task:
2860
  type: Retrieval
2861
+ dataset:
 
2862
  name: MTEB CQADupstackEnglishRetrieval (default)
 
 
2863
  type: mteb/cqadupstack-english
2864
+ config: default
2865
+ split: test
2866
+ revision: ad9991cb51e31e31e430383c75ffb2885547b5f0
2867
  metrics:
2868
  - type: ndcg_at_1
2869
  value: 35.796
 
3147
  value: 44.8928
3148
  - type: main_score
3149
  value: 43.868
3150
+ - task:
3151
  type: Retrieval
3152
+ dataset:
 
3153
  name: MTEB CQADupstackGamingRetrieval (default)
 
 
3154
  type: mteb/cqadupstack-gaming
3155
+ config: default
3156
+ split: test
3157
+ revision: 4885aa143210c98657558c04aaf3dc47cfb54340
3158
  metrics:
3159
  - type: ndcg_at_1
3160
  value: 43.448
 
3438
  value: 52.121399999999994
3439
  - type: main_score
3440
  value: 56.369
3441
+ - task:
3442
  type: Retrieval
3443
+ dataset:
 
3444
  name: MTEB CQADupstackGisRetrieval (default)
 
 
3445
  type: mteb/cqadupstack-gis
3446
+ config: default
3447
+ split: test
3448
+ revision: 5003b3064772da1887988e05400cf3806fe491f2
3449
  metrics:
3450
  - type: ndcg_at_1
3451
  value: 31.863999999999997
 
3729
  value: 38.4264
3730
  - type: main_score
3731
  value: 43.503
3732
+ - task:
3733
  type: Retrieval
3734
+ dataset:
 
3735
  name: MTEB CQADupstackMathematicaRetrieval (default)
 
 
3736
  type: mteb/cqadupstack-mathematica
3737
+ config: default
3738
+ split: test
3739
+ revision: 90fceea13679c63fe563ded68f3b6f06e50061de
3740
  metrics:
3741
  - type: ndcg_at_1
3742
  value: 22.637
 
4020
  value: 30.9234
4021
  - type: main_score
4022
  value: 32.024
4023
+ - task:
4024
  type: Retrieval
4025
+ dataset:
 
4026
  name: MTEB CQADupstackPhysicsRetrieval (default)
 
 
4027
  type: mteb/cqadupstack-physics
4028
+ config: default
4029
+ split: test
4030
+ revision: 79531abbd1fb92d06c6d6315a0cbbbf5bb247ea4
4031
  metrics:
4032
  - type: ndcg_at_1
4033
  value: 36.477
 
4311
  value: 50.6012
4312
  - type: main_score
4313
  value: 47.316
4314
+ - task:
4315
  type: Retrieval
4316
+ dataset:
 
4317
  name: MTEB CQADupstackProgrammersRetrieval (default)
 
 
4318
  type: mteb/cqadupstack-programmers
4319
+ config: default
4320
+ split: test
4321
+ revision: 6184bc1440d2dbc7612be22b50686b8826d22b32
4322
  metrics:
4323
  - type: ndcg_at_1
4324
  value: 33.676
 
4602
  value: 44.222
4603
  - type: main_score
4604
  value: 43.580999999999996
4605
+ - task:
4606
  type: Retrieval
4607
+ dataset:
 
4608
  name: MTEB CQADupstackRetrieval (default)
 
 
4609
  type: CQADupstackRetrieval_is_a_combined_dataset
4610
+ config: default
4611
+ split: test
4612
+ revision: 160c094312a0e1facb97e55eeddb698c0abe3571
4613
  metrics:
4614
  - type: ndcg_at_1
4615
  value: 32.588499999999996
 
4893
  value: 43.299625
4894
  - type: main_score
4895
  value: 42.74341666666667
4896
+ - task:
4897
  type: Retrieval
4898
+ dataset:
 
4899
  name: MTEB CQADupstackRetrieval (default)
 
 
4900
  type: CQADupstackRetrieval_is_a_combined_dataset
4901
+ config: default
4902
+ split: test
4903
+ revision: CQADupstackRetrieval_is_a_combined_dataset
4904
  metrics:
4905
  - type: main_score
4906
  value: 42.743416666666675
4907
  - type: ndcg_at_10
4908
  value: 42.743416666666675
4909
+ - task:
4910
  type: Retrieval
4911
+ dataset:
 
4912
  name: MTEB CQADupstackStatsRetrieval (default)
 
 
4913
  type: mteb/cqadupstack-stats
4914
+ config: default
4915
+ split: test
4916
+ revision: 65ac3a16b8e91f9cee4c9828cc7c335575432a2a
4917
  metrics:
4918
  - type: ndcg_at_1
4919
  value: 27.607
 
5197
  value: 50.3081
5198
  - type: main_score
5199
  value: 36.796
5200
+ - task:
5201
  type: Retrieval
5202
+ dataset:
 
5203
  name: MTEB CQADupstackTexRetrieval (default)
 
 
5204
  type: mteb/cqadupstack-tex
5205
+ config: default
5206
+ split: test
5207
+ revision: 46989137a86843e03a6195de44b09deda022eec7
5208
  metrics:
5209
  - type: ndcg_at_1
5210
  value: 23.159
 
5488
  value: 36.5259
5489
  - type: main_score
5490
  value: 31.775
5491
+ - task:
5492
  type: Retrieval
5493
+ dataset:
 
5494
  name: MTEB CQADupstackUnixRetrieval (default)
 
 
5495
  type: mteb/cqadupstack-unix
5496
+ config: default
5497
+ split: test
5498
+ revision: 6c6430d3a6d36f8d2a829195bc5dc94d7e063e53
5499
  metrics:
5500
  - type: ndcg_at_1
5501
  value: 34.981
 
5779
  value: 49.8286
5780
  - type: main_score
5781
  value: 45.275
5782
+ - task:
5783
  type: Retrieval
5784
+ dataset:
 
5785
  name: MTEB CQADupstackWebmastersRetrieval (default)
 
 
5786
  type: mteb/cqadupstack-webmasters
5787
+ config: default
5788
+ split: test
5789
+ revision: 160c094312a0e1facb97e55eeddb698c0abe3571
5790
  metrics:
5791
  - type: ndcg_at_1
5792
  value: 32.806000000000004
 
6070
  value: 42.3078
6071
  - type: main_score
6072
  value: 42.957
6073
+ - task:
6074
  type: Retrieval
6075
+ dataset:
 
6076
  name: MTEB CQADupstackWordpressRetrieval (default)
 
 
6077
  type: mteb/cqadupstack-wordpress
6078
+ config: default
6079
+ split: test
6080
+ revision: 4ffe81d471b1924886b33c7567bfb200e9eec5c4
6081
  metrics:
6082
  - type: ndcg_at_1
6083
  value: 25.692999999999998
 
6361
  value: 32.6251
6362
  - type: main_score
6363
  value: 36.409000000000006
6364
+ - task:
6365
  type: Retrieval
6366
+ dataset:
 
6367
  name: MTEB ClimateFEVER (default)
 
 
6368
  type: mteb/climate-fever
6369
+ config: default
6370
+ split: test
6371
+ revision: 47f2ac6acb640fc46020b02a5b59fdda04d39380
6372
  metrics:
6373
  - type: ndcg_at_1
6374
  value: 26.971
 
6652
  value: 23.974899999999998
6653
  - type: main_score
6654
  value: 29.494
6655
+ - task:
6656
  type: Retrieval
6657
+ dataset:
 
6658
  name: MTEB CodeFeedbackMT (default)
 
 
6659
  type: CoIR-Retrieval/codefeedback-mt
6660
+ config: default
6661
+ split: test
6662
+ revision: b0f12fa0c0dd67f59c95a5c33d02aeeb4c398c5f
6663
  metrics:
6664
  - type: ndcg_at_1
6665
  value: 21.044
 
6943
  value: 41.7802
6944
  - type: main_score
6945
  value: 31.391999999999996
6946
+ - task:
6947
  type: Retrieval
6948
+ dataset:
 
6949
  name: MTEB CodeFeedbackST (default)
 
 
6950
  type: CoIR-Retrieval/codefeedback-st
6951
+ config: default
6952
+ split: test
6953
+ revision: d213819e87aab9010628da8b73ab4eb337c89340
6954
  metrics:
6955
  - type: ndcg_at_1
6956
  value: 51.227000000000004
 
7234
  value: 63.382000000000005
7235
  - type: main_score
7236
  value: 67.72200000000001
7237
+ - task:
7238
  type: Retrieval
7239
+ dataset:
 
7240
  name: MTEB CodeSearchNetCCRetrieval (python)
 
 
7241
  type: CoIR-Retrieval/CodeSearchNet-ccr
7242
+ config: python
7243
+ split: test
7244
+ revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
7245
  metrics:
7246
  - type: ndcg_at_1
7247
  value: 32.417
 
7525
  value: 50.1074
7526
  - type: main_score
7527
  value: 45.532000000000004
7528
+ - task:
7529
  type: Retrieval
7530
+ dataset:
 
7531
  name: MTEB CodeSearchNetCCRetrieval (javascript)
 
 
7532
  type: CoIR-Retrieval/CodeSearchNet-ccr
7533
+ config: javascript
7534
+ split: test
7535
+ revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
7536
  metrics:
7537
  - type: ndcg_at_1
7538
  value: 33.364
 
7816
  value: 48.0248
7817
  - type: main_score
7818
  value: 46.024
7819
+ - task:
7820
  type: Retrieval
7821
+ dataset:
 
7822
  name: MTEB CodeSearchNetCCRetrieval (go)
 
 
7823
  type: CoIR-Retrieval/CodeSearchNet-ccr
7824
+ config: go
7825
+ split: test
7826
+ revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
7827
  metrics:
7828
  - type: ndcg_at_1
7829
  value: 26.471
 
8107
  value: 41.4381
8108
  - type: main_score
8109
  value: 37.555
8110
+ - task:
8111
  type: Retrieval
8112
+ dataset:
 
8113
  name: MTEB CodeSearchNetCCRetrieval (ruby)
 
 
8114
  type: CoIR-Retrieval/CodeSearchNet-ccr
8115
+ config: ruby
8116
+ split: test
8117
+ revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
8118
  metrics:
8119
  - type: ndcg_at_1
8120
  value: 36.003
 
8398
  value: 51.7548
8399
  - type: main_score
8400
  value: 47.549
8401
+ - task:
8402
  type: Retrieval
8403
+ dataset:
 
8404
  name: MTEB CodeSearchNetCCRetrieval (java)
 
 
8405
  type: CoIR-Retrieval/CodeSearchNet-ccr
8406
+ config: java
8407
+ split: test
8408
+ revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
8409
  metrics:
8410
  - type: ndcg_at_1
8411
  value: 33.355000000000004
 
8689
  value: 48.8277
8690
  - type: main_score
8691
  value: 45.539
8692
+ - task:
8693
  type: Retrieval
8694
+ dataset:
 
8695
  name: MTEB CodeSearchNetCCRetrieval (php)
 
 
8696
  type: CoIR-Retrieval/CodeSearchNet-ccr
8697
+ config: php
8698
+ split: test
8699
+ revision: 6e1effa2c03723c5fde48ee912b5ee08d4f211e8
8700
  metrics:
8701
  - type: ndcg_at_1
8702
  value: 25.139
 
8980
  value: 45.191900000000004
8981
  - type: main_score
8982
  value: 35.942
8983
+ - task:
8984
  type: Retrieval
8985
+ dataset:
 
8986
  name: MTEB CodeSearchNetRetrieval (python)
 
 
8987
  type: code-search-net/code_search_net
8988
+ config: python
8989
+ split: test
8990
+ revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
8991
  metrics:
8992
  - type: ndcg_at_1
8993
  value: 70.89999999999999
 
9271
  value: 68.3376
9272
  - type: main_score
9273
  value: 83.12
9274
+ - task:
9275
  type: Retrieval
9276
+ dataset:
 
9277
  name: MTEB CodeSearchNetRetrieval (javascript)
 
 
9278
  type: code-search-net/code_search_net
9279
+ config: javascript
9280
+ split: test
9281
+ revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
9282
  metrics:
9283
  - type: ndcg_at_1
9284
  value: 57.99999999999999
 
9562
  value: 67.08579999999999
9563
  - type: main_score
9564
  value: 70.34
9565
+ - task:
9566
  type: Retrieval
9567
+ dataset:
 
9568
  name: MTEB CodeSearchNetRetrieval (go)
 
 
9569
  type: code-search-net/code_search_net
9570
+ config: go
9571
+ split: test
9572
+ revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
9573
  metrics:
9574
  - type: ndcg_at_1
9575
  value: 75.6
 
9853
  value: 74.4583
9854
  - type: main_score
9855
  value: 86.139
9856
+ - task:
9857
  type: Retrieval
9858
+ dataset:
 
9859
  name: MTEB CodeSearchNetRetrieval (ruby)
 
 
9860
  type: code-search-net/code_search_net
9861
+ config: ruby
9862
+ split: test
9863
+ revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
9864
  metrics:
9865
  - type: ndcg_at_1
9866
  value: 61.3
 
10144
  value: 69.0767
10145
  - type: main_score
10146
  value: 74.736
10147
+ - task:
10148
  type: Retrieval
10149
+ dataset:
 
10150
  name: MTEB CodeSearchNetRetrieval (java)
 
 
10151
  type: code-search-net/code_search_net
10152
+ config: java
10153
+ split: test
10154
+ revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
10155
  metrics:
10156
  - type: ndcg_at_1
10157
  value: 55.1
 
10435
  value: 60.16010000000001
10436
  - type: main_score
10437
  value: 70.89
10438
+ - task:
10439
  type: Retrieval
10440
+ dataset:
 
10441
  name: MTEB CodeSearchNetRetrieval (php)
 
 
10442
  type: code-search-net/code_search_net
10443
+ config: php
10444
+ split: test
10445
+ revision: fdc6a9e39575768c27eb8a2a5f702bf846eb4759
10446
  metrics:
10447
  - type: ndcg_at_1
10448
  value: 56.89999999999999
 
10726
  value: 63.048300000000005
10727
  - type: main_score
10728
  value: 72.92999999999999
10729
+ - task:
10730
  type: Retrieval
10731
+ dataset:
 
10732
  name: MTEB CodeTransOceanContest (default)
 
 
10733
  type: CoIR-Retrieval/codetrans-contest
10734
+ config: default
10735
+ split: test
10736
+ revision: 20da4eb20a4b17300c0986ee148c90867a7f2a4d
10737
  metrics:
10738
  - type: ndcg_at_1
10739
  value: 50.226000000000006
 
11017
  value: 71.28710000000001
11018
  - type: main_score
11019
  value: 60.831
11020
+ - task:
11021
  type: Retrieval
11022
+ dataset:
 
11023
  name: MTEB CodeTransOceanDL (default)
 
 
11024
  type: CoIR-Retrieval/codetrans-dl
11025
+ config: default
11026
+ split: test
11027
+ revision: 281562cb8a1265ab5c0824bfa6ddcd9b0a15618f
11028
  metrics:
11029
  - type: ndcg_at_1
11030
  value: 8.889
 
11308
  value: 9.591
11309
  - type: main_score
11310
  value: 32.138
11311
+ - task:
11312
  type: Retrieval
11313
+ dataset:
 
11314
  name: MTEB CosQA (default)
 
 
11315
  type: CoIR-Retrieval/cosqa
11316
+ config: default
11317
+ split: test
11318
+ revision: bc5efb7e9d437246ce393ed19d772e08e4a79535
11319
  metrics:
11320
  - type: ndcg_at_1
11321
  value: 14.6
 
11599
  value: 24.0998
11600
  - type: main_score
11601
  value: 33.452
11602
+ - task:
11603
  type: Retrieval
11604
+ dataset:
 
11605
  name: MTEB DBPedia (default)
 
 
11606
  type: mteb/dbpedia
11607
+ config: default
11608
+ split: test
11609
+ revision: c0f706b76e590d620bd6618b3ca8efdd34e2d659
11610
  metrics:
11611
  - type: ndcg_at_1
11612
  value: 48.75
 
11890
  value: 47.7958
11891
  - type: main_score
11892
  value: 34.565
11893
+ - task:
11894
+ type: Classification
11895
+ dataset:
 
11896
  name: MTEB EmotionClassification (default)
 
 
11897
  type: mteb/emotion
11898
+ config: default
11899
+ split: test
11900
+ revision: 4f58c6b202a23cf9a4da393831edf4f9183cad37
11901
  metrics:
11902
  - type: accuracy
11903
  value: 36.449999999999996
 
11907
  value: 38.7818
11908
  - type: main_score
11909
  value: 36.449999999999996
11910
+ - task:
11911
+ type: Retrieval
11912
+ dataset:
 
11913
  name: MTEB FEVER (default)
 
 
11914
  type: mteb/fever
11915
+ config: default
11916
+ split: test
11917
+ revision: bea83ef9e8fb933d90a2f1d5515737465d613e12
11918
  metrics:
11919
  - type: ndcg_at_1
11920
  value: 77.93299999999999
 
12198
  value: 64.1637
12199
  - type: main_score
12200
  value: 84.932
12201
+ - task:
12202
  type: Retrieval
12203
+ dataset:
 
12204
  name: MTEB FiQA2018 (default)
 
 
12205
  type: mteb/fiqa
12206
+ config: default
12207
+ split: test
12208
+ revision: 27a168819829fe9bcd655c2df245fb19452e8e06
12209
  metrics:
12210
  - type: ndcg_at_1
12211
  value: 34.259
 
12489
  value: 46.399699999999996
12490
  - type: main_score
12491
  value: 35.663
12492
+ - task:
12493
  type: Retrieval
12494
+ dataset:
 
12495
  name: MTEB HotpotQA (default)
 
 
12496
  type: mteb/hotpotqa
12497
+ config: default
12498
+ split: test
12499
+ revision: ab518f4d6fcca38d87c25209f94beba119d02014
12500
  metrics:
12501
  - type: ndcg_at_1
12502
  value: 75.908
 
12780
  value: 69.503
12781
  - type: main_score
12782
  value: 61.513
12783
+ - task:
12784
+ type: Classification
12785
+ dataset:
 
12786
  name: MTEB ImdbClassification (default)
 
 
12787
  type: mteb/imdb
12788
+ config: default
12789
+ split: test
12790
+ revision: 3d86128a09e091d6018b6d26cad27f2739fc2db7
12791
  metrics:
12792
  - type: accuracy
12793
  value: 63.0232
 
12801
  value: 58.377199999999995
12802
  - type: main_score
12803
  value: 63.0232
12804
+ - task:
12805
+ type: Retrieval
12806
+ dataset:
 
12807
  name: MTEB MIRACLRetrieval (ar)
 
 
12808
  type: miracl/mmteb-miracl
12809
+ config: ar
12810
+ split: dev
12811
+ revision: main
12812
  metrics:
12813
  - type: ndcg_at_1
12814
  value: 57.459
 
13092
  value: 41.6879
13093
  - type: main_score
13094
  value: 64.238
13095
+ - task:
13096
  type: Retrieval
13097
+ dataset:
 
13098
  name: MTEB MIRACLRetrieval (bn)
 
 
13099
  type: miracl/mmteb-miracl
13100
+ config: bn
13101
+ split: dev
13102
+ revision: main
13103
  metrics:
13104
  - type: ndcg_at_1
13105
  value: 60.341
 
13383
  value: 40.7358
13384
  - type: main_score
13385
  value: 68.05499999999999
13386
+ - task:
13387
  type: Retrieval
13388
+ dataset:
 
13389
  name: MTEB MIRACLRetrieval (de)
 
 
13390
  type: miracl/mmteb-miracl
13391
+ config: de
13392
+ split: dev
13393
+ revision: main
13394
  metrics:
13395
  - type: ndcg_at_1
13396
  value: 45.574
 
13674
  value: 42.620000000000005
13675
  - type: main_score
13676
  value: 48.123
13677
+ - task:
13678
  type: Retrieval
13679
+ dataset:
 
13680
  name: MTEB MIRACLRetrieval (en)
 
 
13681
  type: miracl/mmteb-miracl
13682
+ config: en
13683
+ split: dev
13684
+ revision: main
13685
  metrics:
13686
  - type: ndcg_at_1
13687
  value: 45.556999999999995
 
13965
  value: 30.0019
13966
  - type: main_score
13967
  value: 49.372
13968
+ - task:
13969
  type: Retrieval
13970
+ dataset:
 
13971
  name: MTEB MIRACLRetrieval (es)
 
 
13972
  type: miracl/mmteb-miracl
13973
+ config: es
13974
+ split: dev
13975
+ revision: main
13976
  metrics:
13977
  - type: ndcg_at_1
13978
  value: 55.71
 
14256
  value: 32.1927
14257
  - type: main_score
14258
  value: 49.688
14259
+ - task:
14260
  type: Retrieval
14261
+ dataset:
 
14262
  name: MTEB MIRACLRetrieval (fa)
 
 
14263
  type: miracl/mmteb-miracl
14264
+ config: fa
14265
+ split: dev
14266
+ revision: main
14267
  metrics:
14268
  - type: ndcg_at_1
14269
  value: 39.873
 
14547
  value: 20.4831
14548
  - type: main_score
14549
  value: 50.226000000000006
14550
+ - task:
14551
  type: Retrieval
14552
+ dataset:
 
14553
  name: MTEB MIRACLRetrieval (fi)
 
 
14554
  type: miracl/mmteb-miracl
14555
+ config: fi
14556
+ split: dev
14557
+ revision: main
14558
  metrics:
14559
  - type: ndcg_at_1
14560
  value: 60.818000000000005
 
14838
  value: 48.753600000000006
14839
  - type: main_score
14840
  value: 67.46
14841
+ - task:
14842
  type: Retrieval
14843
+ dataset:
 
14844
  name: MTEB MIRACLRetrieval (fr)
 
 
14845
  type: miracl/mmteb-miracl
14846
+ config: fr
14847
+ split: dev
14848
+ revision: main
14849
  metrics:
14850
  - type: ndcg_at_1
14851
  value: 39.65
 
15129
  value: 22.6855
15130
  - type: main_score
15131
  value: 49.891000000000005
15132
+ - task:
15133
  type: Retrieval
15134
+ dataset:
 
15135
  name: MTEB MIRACLRetrieval (hi)
 
 
15136
  type: miracl/mmteb-miracl
15137
+ config: hi
15138
+ split: dev
15139
+ revision: main
15140
  metrics:
15141
  - type: ndcg_at_1
15142
  value: 36.857
 
15420
  value: 35.1808
15421
  - type: main_score
15422
  value: 46.141
15423
+ - task:
15424
  type: Retrieval
15425
+ dataset:
 
15426
  name: MTEB MIRACLRetrieval (id)
 
 
15427
  type: miracl/mmteb-miracl
15428
+ config: id
15429
+ split: dev
15430
+ revision: main
15431
  metrics:
15432
  - type: ndcg_at_1
15433
  value: 46.354
 
15711
  value: 27.054000000000002
15712
  - type: main_score
15713
  value: 47.229
15714
+ - task:
15715
  type: Retrieval
15716
+ dataset:
 
15717
  name: MTEB MIRACLRetrieval (ja)
 
 
15718
  type: miracl/mmteb-miracl
15719
+ config: ja
15720
+ split: dev
15721
+ revision: main
15722
  metrics:
15723
  - type: ndcg_at_1
15724
  value: 56.279
 
16002
  value: 42.1768
16003
  - type: main_score
16004
  value: 62.81
16005
+ - task:
16006
  type: Retrieval
16007
+ dataset:
 
16008
  name: MTEB MIRACLRetrieval (ko)
 
 
16009
  type: miracl/mmteb-miracl
16010
+ config: ko
16011
+ split: dev
16012
+ revision: main
16013
  metrics:
16014
  - type: ndcg_at_1
16015
  value: 52.581999999999994
 
16293
  value: 41.166199999999996
16294
  - type: main_score
16295
  value: 59.216
16296
+ - task:
16297
  type: Retrieval
16298
+ dataset:
 
16299
  name: MTEB MIRACLRetrieval (ru)
 
 
16300
  type: miracl/mmteb-miracl
16301
+ config: ru
16302
+ split: dev
16303
+ revision: main
16304
  metrics:
16305
  - type: ndcg_at_1
16306
  value: 47.524
 
16584
  value: 33.353300000000004
16585
  - type: main_score
16586
  value: 52.349000000000004
16587
+ - task:
16588
  type: Retrieval
16589
+ dataset:
 
16590
  name: MTEB MIRACLRetrieval (sw)
 
 
16591
  type: miracl/mmteb-miracl
16592
+ config: sw
16593
+ split: dev
16594
+ revision: main
16595
  metrics:
16596
  - type: ndcg_at_1
16597
  value: 51.66
 
16875
  value: 36.696400000000004
16876
  - type: main_score
16877
  value: 61.271
16878
+ - task:
16879
  type: Retrieval
16880
+ dataset:
 
16881
  name: MTEB MIRACLRetrieval (te)
 
 
16882
  type: miracl/mmteb-miracl
16883
+ config: te
16884
+ split: dev
16885
+ revision: main
16886
  metrics:
16887
  - type: ndcg_at_1
16888
  value: 63.647
 
17166
  value: 59.1847
17167
  - type: main_score
17168
  value: 79.149
17169
+ - task:
17170
  type: Retrieval
17171
+ dataset:
 
17172
  name: MTEB MIRACLRetrieval (th)
 
 
17173
  type: miracl/mmteb-miracl
17174
+ config: th
17175
+ split: dev
17176
+ revision: main
17177
  metrics:
17178
  - type: ndcg_at_1
17179
  value: 66.712
 
17457
  value: 46.5276
17458
  - type: main_score
17459
  value: 73.324
17460
+ - task:
17461
  type: Retrieval
17462
+ dataset:
 
17463
  name: MTEB MIRACLRetrieval (yo)
 
 
17464
  type: miracl/mmteb-miracl
17465
+ config: yo
17466
+ split: dev
17467
+ revision: main
17468
  metrics:
17469
  - type: ndcg_at_1
17470
  value: 49.58
 
17748
  value: 50.3215
17749
  - type: main_score
17750
  value: 68.705
17751
+ - task:
17752
  type: Retrieval
17753
+ dataset:
 
17754
  name: MTEB MIRACLRetrieval (zh)
 
 
17755
  type: miracl/mmteb-miracl
17756
+ config: zh
17757
+ split: dev
17758
+ revision: main
17759
  metrics:
17760
  - type: ndcg_at_1
17761
  value: 47.583
 
18039
  value: 29.526799999999998
18040
  - type: main_score
18041
  value: 52.553000000000004
18042
+ - task:
18043
  type: Retrieval
18044
+ dataset:
 
18045
  name: MTEB MSMARCO (default)
 
 
18046
  type: mteb/msmarco
18047
+ config: default
18048
+ split: dev
18049
+ revision: c5a29a104738b98a9e76336939199e264163d4a0
18050
  metrics:
18051
  - type: ndcg_at_1
18052
  value: 14.155000000000001
 
18330
  value: 24.7274
18331
  - type: main_score
18332
  value: 29.866999999999997
18333
+ - task:
18334
+ type: Classification
18335
+ dataset:
 
18336
  name: MTEB MTOPDomainClassification (en)
 
 
18337
  type: mteb/mtop_domain
18338
+ config: en
18339
+ split: test
18340
+ revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
18341
  metrics:
18342
  - type: accuracy
18343
  value: 89.89970000000001
 
18347
  value: 89.8682
18348
  - type: main_score
18349
  value: 89.89970000000001
18350
+ - task:
18351
  type: Classification
18352
+ dataset:
 
18353
  name: MTEB MTOPIntentClassification (en)
 
 
18354
  type: mteb/mtop_intent
18355
+ config: en
18356
+ split: test
18357
+ revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
18358
  metrics:
18359
  - type: accuracy
18360
  value: 60.26899999999999
 
18364
  value: 63.033899999999996
18365
  - type: main_score
18366
  value: 60.26899999999999
18367
+ - task:
18368
  type: Classification
18369
+ dataset:
 
18370
  name: MTEB MassiveIntentClassification (en)
 
 
18371
  type: mteb/amazon_massive_intent
18372
+ config: en
18373
+ split: test
18374
+ revision: 4672e20407010da34463acc759c162ca9734bca6
18375
  metrics:
18376
  - type: accuracy
18377
  value: 63.9509
 
18381
  value: 62.8
18382
  - type: main_score
18383
  value: 63.9509
18384
+ - task:
18385
  type: Classification
18386
+ dataset:
 
18387
  name: MTEB MassiveScenarioClassification (en)
 
 
18388
  type: mteb/amazon_massive_scenario
18389
+ config: en
18390
+ split: test
18391
+ revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
18392
  metrics:
18393
  - type: accuracy
18394
  value: 70.928
 
18398
  value: 70.6366
18399
  - type: main_score
18400
  value: 70.928
18401
+ - task:
18402
+ type: Clustering
18403
+ dataset:
 
18404
  name: MTEB MedrxivClusteringP2P (default)
 
 
18405
  type: mteb/medrxiv-clustering-p2p
18406
+ config: default
18407
+ split: test
18408
+ revision: e7a26af6f3ae46b30dde8737f02c07b1505bcc73
18409
+ metrics:
18410
  - type: v_measure
18411
  value: 31.522
18412
  - type: v_measure_std
18413
  value: 1.5528
18414
  - type: main_score
18415
  value: 31.522
18416
+ - task:
18417
  type: Clustering
18418
+ dataset:
 
18419
  name: MTEB MedrxivClusteringS2S (default)
 
 
18420
  type: mteb/medrxiv-clustering-s2s
18421
+ config: default
18422
+ split: test
18423
+ revision: 35191c8c0dca72d8ff3efcd72aa802307d469663
18424
  metrics:
18425
  - type: v_measure
18426
  value: 28.572599999999998
 
18428
  value: 1.8154
18429
  - type: main_score
18430
  value: 28.572599999999998
18431
+ - task:
18432
+ type: Reranking
18433
+ dataset:
 
18434
  name: MTEB MindSmallReranking (default)
 
 
18435
  type: mteb/mind_small
18436
+ config: default
18437
+ split: test
18438
+ revision: 59042f120c80e8afa9cdbb224f67076cec0fc9a7
18439
  metrics:
18440
  - type: map
18441
  value: 30.5381
 
18455
  value: 13.2721
18456
  - type: main_score
18457
  value: 30.5381
18458
+ - task:
18459
+ type: Retrieval
18460
+ dataset:
 
18461
  name: MTEB NFCorpus (default)
 
 
18462
  type: mteb/nfcorpus
18463
+ config: default
18464
+ split: test
18465
+ revision: ec0fa4fe99da2ff19ca1214b7966684033a58814
18466
  metrics:
18467
  - type: ndcg_at_1
18468
  value: 38.080000000000005
 
18746
  value: 34.3718
18747
  - type: main_score
18748
  value: 28.903000000000002
18749
+ - task:
18750
  type: Retrieval
18751
+ dataset:
 
18752
  name: MTEB NQ (default)
 
 
18753
  type: mteb/nq
18754
+ config: default
18755
+ split: test
18756
+ revision: b774495ed302d8c44a3a7ea25c90dbce03968f31
18757
  metrics:
18758
  - type: ndcg_at_1
18759
  value: 34.589
 
19037
  value: 32.6192
19038
  - type: main_score
19039
  value: 53.410000000000004
19040
+ - task:
19041
  type: Retrieval
19042
+ dataset:
 
19043
  name: MTEB QuoraRetrieval (default)
 
 
19044
  type: mteb/quora
19045
+ config: default
19046
+ split: test
19047
+ revision: e4e08e0b7dbe3c8700f0daef558ff32256715259
19048
  metrics:
19049
  - type: ndcg_at_1
19050
  value: 79.64
 
19328
  value: 75.84349999999999
19329
  - type: main_score
19330
  value: 86.871
19331
+ - task:
19332
+ type: Clustering
19333
+ dataset:
 
19334
  name: MTEB RedditClustering (default)
 
 
19335
  type: mteb/reddit-clustering
19336
+ config: default
19337
+ split: test
19338
+ revision: 24640382cdbf8abc73003fb0fa6d111a705499eb
19339
  metrics:
19340
  - type: v_measure
19341
  value: 45.8568
 
19343
  value: 5.685
19344
  - type: main_score
19345
  value: 45.8568
19346
+ - task:
19347
  type: Clustering
19348
+ dataset:
 
19349
  name: MTEB RedditClusteringP2P (default)
 
 
19350
  type: mteb/reddit-clustering-p2p
19351
+ config: default
19352
+ split: test
19353
+ revision: 385e3cb46b4cfa89021f56c4380204149d0efe33
19354
  metrics:
19355
  - type: v_measure
19356
  value: 54.9896
 
19358
  value: 12.0517
19359
  - type: main_score
19360
  value: 54.9896
19361
+ - task:
19362
+ type: Retrieval
19363
+ dataset:
 
19364
  name: MTEB SCIDOCS (default)
 
 
19365
  type: mteb/scidocs
19366
+ config: default
19367
+ split: test
19368
+ revision: f8c2fcf00f625baaa80f62ec5bd9e1fff3b8ae88
19369
  metrics:
19370
  - type: ndcg_at_1
19371
  value: 20.599999999999998
 
19649
  value: 23.392599999999998
19650
  - type: main_score
19651
  value: 17.721
19652
+ - task:
19653
+ type: STS
19654
+ dataset:
 
19655
  name: MTEB SICK-R (default)
 
 
19656
  type: mteb/sickr-sts
19657
+ config: default
19658
+ split: test
19659
+ revision: 20a6d6f312dd54037fe07a32d58e5e168867909d
19660
  metrics:
19661
  - type: pearson
19662
  value: 75.5378
 
19676
  value: 68.7448
19677
  - type: main_score
19678
  value: 68.7448
19679
+ - task:
19680
  type: STS
19681
+ dataset:
 
19682
  name: MTEB STS12 (default)
 
 
19683
  type: mteb/sts12-sts
19684
+ config: default
19685
+ split: test
19686
+ revision: a0d554a64d88156834ff5ae9920b964011b16384
19687
  metrics:
19688
  - type: pearson
19689
  value: 81.6341
 
19703
  value: 75.1934
19704
  - type: main_score
19705
  value: 75.1911
19706
+ - task:
19707
  type: STS
19708
+ dataset:
 
19709
  name: MTEB STS13 (default)
 
 
19710
  type: mteb/sts13-sts
19711
+ config: default
19712
+ split: test
19713
+ revision: 7e90230a92c190f1bf69ae9002b8cea547a64cca
19714
  metrics:
19715
  - type: pearson
19716
  value: 76.4378
 
19730
  value: 77.3053
19731
  - type: main_score
19732
  value: 77.3053
19733
+ - task:
19734
  type: STS
19735
+ dataset:
 
19736
  name: MTEB STS14 (default)
 
 
19737
  type: mteb/sts14-sts
19738
+ config: default
19739
+ split: test
19740
+ revision: 6031580fec1f6af667f0bd2da0a551cf4f0b2375
19741
  metrics:
19742
  - type: pearson
19743
  value: 78.4342
 
19757
  value: 74.94800000000001
19758
  - type: main_score
19759
  value: 74.9479
19760
+ - task:
19761
  type: STS
19762
+ dataset:
 
19763
  name: MTEB STS15 (default)
 
 
19764
  type: mteb/sts15-sts
19765
+ config: default
19766
+ split: test
19767
+ revision: ae752c7c21bf194d8b67fd573edf7ae58183cbe3
19768
  metrics:
19769
  - type: pearson
19770
  value: 85.1908
 
19784
  value: 86.0174
19785
  - type: main_score
19786
  value: 86.0174
19787
+ - task:
19788
  type: STS
19789
+ dataset:
 
19790
  name: MTEB STS16 (default)
 
 
19791
  type: mteb/sts16-sts
19792
+ config: default
19793
+ split: test
19794
+ revision: 4d8694f8f0e0100860b497b999b3dbed754a0513
19795
  metrics:
19796
  - type: pearson
19797
  value: 80.5421
 
19811
  value: 81.9568
19812
  - type: main_score
19813
  value: 81.9568
19814
+ - task:
19815
  type: STS
19816
+ dataset:
 
19817
  name: MTEB STS17 (en-tr)
 
 
19818
  type: mteb/sts17-crosslingual-sts
19819
+ config: en-tr
19820
+ split: test
19821
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19822
  metrics:
19823
  - type: pearson
19824
  value: 48.2717
 
19838
  value: 44.642900000000004
19839
  - type: main_score
19840
  value: 44.642900000000004
19841
+ - task:
19842
  type: STS
19843
+ dataset:
 
19844
  name: MTEB STS17 (it-en)
 
 
19845
  type: mteb/sts17-crosslingual-sts
19846
+ config: it-en
19847
+ split: test
19848
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19849
  metrics:
19850
  - type: pearson
19851
  value: 67.8601
 
19865
  value: 68.2763
19866
  - type: main_score
19867
  value: 68.2763
19868
+ - task:
19869
  type: STS
19870
+ dataset:
 
19871
  name: MTEB STS17 (en-en)
 
 
19872
  type: mteb/sts17-crosslingual-sts
19873
+ config: en-en
19874
+ split: test
19875
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19876
  metrics:
19877
  - type: pearson
19878
  value: 78.05539999999999
 
19892
  value: 78.5929
19893
  - type: main_score
19894
  value: 78.5929
19895
+ - task:
19896
  type: STS
19897
+ dataset:
 
19898
  name: MTEB STS17 (en-ar)
 
 
19899
  type: mteb/sts17-crosslingual-sts
19900
+ config: en-ar
19901
+ split: test
19902
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19903
  metrics:
19904
  - type: pearson
19905
  value: 59.4349
 
19919
  value: 59.838800000000006
19920
  - type: main_score
19921
  value: 59.838800000000006
19922
+ - task:
19923
  type: STS
19924
+ dataset:
 
19925
  name: MTEB STS17 (fr-en)
 
 
19926
  type: mteb/sts17-crosslingual-sts
19927
+ config: fr-en
19928
+ split: test
19929
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19930
  metrics:
19931
  - type: pearson
19932
  value: 73.84039999999999
 
19946
  value: 74.2498
19947
  - type: main_score
19948
  value: 74.2498
19949
+ - task:
19950
  type: STS
19951
+ dataset:
 
19952
  name: MTEB STS17 (nl-en)
 
 
19953
  type: mteb/sts17-crosslingual-sts
19954
+ config: nl-en
19955
+ split: test
19956
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19957
  metrics:
19958
  - type: pearson
19959
  value: 67.9218
 
19973
  value: 68.0418
19974
  - type: main_score
19975
  value: 68.0418
19976
+ - task:
19977
  type: STS
19978
+ dataset:
 
19979
  name: MTEB STS17 (es-en)
 
 
19980
  type: mteb/sts17-crosslingual-sts
19981
+ config: es-en
19982
+ split: test
19983
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
19984
  metrics:
19985
  - type: pearson
19986
  value: 70.381
 
20000
  value: 69.5729
20001
  - type: main_score
20002
  value: 69.5729
20003
+ - task:
20004
  type: STS
20005
+ dataset:
 
20006
  name: MTEB STS17 (en-de)
 
 
20007
  type: mteb/sts17-crosslingual-sts
20008
+ config: en-de
20009
+ split: test
20010
+ revision: faeb762787bd10488a50c8b5be4a3b82e411949c
20011
  metrics:
20012
  - type: pearson
20013
  value: 70.0196
 
20027
  value: 69.7175
20028
  - type: main_score
20029
  value: 69.7175
20030
+ - task:
20031
  type: STS
20032
+ dataset:
 
20033
  name: MTEB STS22 (de-en)
 
 
20034
  type: mteb/sts22-crosslingual-sts
20035
+ config: de-en
20036
+ split: test
20037
+ revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20038
  metrics:
20039
  - type: pearson
20040
  value: 65.7536
 
20054
  value: 60.04429999999999
20055
  - type: main_score
20056
  value: 60.04429999999999
20057
+ - task:
20058
  type: STS
20059
+ dataset:
 
20060
  name: MTEB STS22 (en)
 
 
20061
  type: mteb/sts22-crosslingual-sts
20062
+ config: en
20063
+ split: test
20064
+ revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20065
  metrics:
20066
  - type: pearson
20067
  value: 68.997
 
20081
  value: 68.1508
20082
  - type: main_score
20083
  value: 68.1508
20084
+ - task:
20085
  type: STS
20086
+ dataset:
 
20087
  name: MTEB STS22 (es-en)
 
 
20088
  type: mteb/sts22-crosslingual-sts
20089
+ config: es-en
20090
+ split: test
20091
+ revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20092
  metrics:
20093
  - type: pearson
20094
  value: 80.2006
 
20108
  value: 80.4702
20109
  - type: main_score
20110
  value: 80.4702
20111
+ - task:
20112
  type: STS
20113
+ dataset:
 
20114
  name: MTEB STS22 (zh-en)
 
 
20115
  type: mteb/sts22-crosslingual-sts
20116
+ config: zh-en
20117
+ split: test
20118
+ revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20119
  metrics:
20120
  - type: pearson
20121
  value: 74.0885
 
20135
  value: 72.4574
20136
  - type: main_score
20137
  value: 72.4574
20138
+ - task:
20139
  type: STS
20140
+ dataset:
 
20141
  name: MTEB STS22 (pl-en)
 
 
20142
  type: mteb/sts22-crosslingual-sts
20143
+ config: pl-en
20144
+ split: test
20145
+ revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
20146
  metrics:
20147
  - type: pearson
20148
  value: 74.1794
 
20162
  value: 70.6749
20163
  - type: main_score
20164
  value: 70.6749
20165
+ - task:
20166
  type: STS
20167
+ dataset:
 
20168
  name: MTEB STSBenchmark (default)
 
 
20169
  type: mteb/stsbenchmark-sts
20170
+ config: default
20171
+ split: test
20172
+ revision: b0fddb56ed78048fa8b90373c8a3cfc37b684831
20173
  metrics:
20174
  - type: pearson
20175
  value: 76.7328
 
20189
  value: 78.4076
20190
  - type: main_score
20191
  value: 78.4076
20192
+ - task:
20193
+ type: Reranking
20194
+ dataset:
 
20195
  name: MTEB SciDocsRR (default)
 
 
20196
  type: mteb/scidocs-reranking
20197
+ config: default
20198
+ split: test
20199
+ revision: d3c5e1fc0b855ab6097bf1cda04dd73947d7caab
20200
  metrics:
20201
  - type: map
20202
  value: 79.6097
 
20216
  value: 49.763600000000004
20217
  - type: main_score
20218
  value: 79.6097
20219
+ - task:
20220
+ type: Retrieval
20221
+ dataset:
 
20222
  name: MTEB SciFact (default)
 
 
20223
  type: mteb/scifact
20224
+ config: default
20225
+ split: test
20226
+ revision: 0228b52cf27578f30900b9e5271d331663a030d7
20227
  metrics:
20228
  - type: ndcg_at_1
20229
  value: 54.0
 
20507
  value: 66.43050000000001
20508
  - type: main_score
20509
  value: 65.847
20510
+ - task:
20511
+ type: PairClassification
20512
+ dataset:
 
20513
  name: MTEB SprintDuplicateQuestions (default)
 
 
20514
  type: mteb/sprintduplicatequestions-pairclassification
20515
+ config: default
20516
+ split: test
20517
+ revision: d66bd1f72af766a5cc4b0ca5e00c162f89e8cc46
20518
  metrics:
20519
  - type: similarity_accuracy
20520
  value: 99.7386
 
20598
  value: 93.50840000000001
20599
  - type: main_score
20600
  value: 93.50840000000001
20601
+ - task:
20602
+ type: Clustering
20603
+ dataset:
 
20604
  name: MTEB StackExchangeClustering (default)
 
 
20605
  type: mteb/stackexchange-clustering
20606
+ config: default
20607
+ split: test
20608
+ revision: 6cbc1f7b2bc0622f2e39d2c77fa502909748c259
20609
  metrics:
20610
  - type: v_measure
20611
  value: 55.9311
 
20613
  value: 5.0881
20614
  - type: main_score
20615
  value: 55.9311
20616
+ - task:
20617
  type: Clustering
20618
+ dataset:
 
20619
  name: MTEB StackExchangeClusteringP2P (default)
 
 
20620
  type: mteb/stackexchange-clustering-p2p
20621
+ config: default
20622
+ split: test
20623
+ revision: 815ca46b2622cec33ccafc3735d572c266efdb44
20624
  metrics:
20625
  - type: v_measure
20626
  value: 32.9298
 
20628
  value: 1.7169
20629
  - type: main_score
20630
  value: 32.9298
20631
+ - task:
20632
+ type: Reranking
20633
+ dataset:
 
20634
  name: MTEB StackOverflowDupQuestions (default)
 
 
20635
  type: mteb/stackoverflowdupquestions-reranking
20636
+ config: default
20637
+ split: test
20638
+ revision: e185fbe320c72810689fc5848eb6114e1ef5ec69
20639
  metrics:
20640
  - type: map
20641
  value: 51.7759
 
20655
  value: 37.3753
20656
  - type: main_score
20657
  value: 51.7759
20658
+ - task:
20659
+ type: Retrieval
20660
+ dataset:
 
20661
  name: MTEB StackOverflowQA (default)
 
 
20662
  type: CoIR-Retrieval/stackoverflow-qa
20663
+ config: default
20664
+ split: test
20665
+ revision: db8f169f3894c14a00251061f957b2063eef2bd5
20666
  metrics:
20667
  - type: ndcg_at_1
20668
  value: 68.205
 
20946
  value: 77.837
20947
  - type: main_score
20948
  value: 78.45
20949
+ - task:
20950
+ type: Summarization
20951
+ dataset:
 
20952
  name: MTEB SummEval (default)
 
 
20953
  type: mteb/summeval
20954
+ config: default
20955
+ split: test
20956
+ revision: cda12ad7615edc362dbf25a00fdd61d3b1eaf93c
20957
  metrics:
20958
  - type: pearson
20959
  value: 31.7097
 
20969
  value: 31.7097
20970
  - type: main_score
20971
  value: 32.0256
20972
+ - task:
20973
+ type: Retrieval
20974
+ dataset:
 
20975
  name: MTEB SyntheticText2SQL (default)
 
 
20976
  type: CoIR-Retrieval/synthetic-text2sql
20977
+ config: default
20978
+ split: test
20979
+ revision: 686b87296c3a0191b5d9415a00526c62db9fce09
20980
  metrics:
20981
  - type: ndcg_at_1
20982
  value: 3.5549999999999997
 
21260
  value: -53.976
21261
  - type: main_score
21262
  value: 47.344
21263
+ - task:
21264
  type: Retrieval
21265
+ dataset:
 
21266
  name: MTEB TRECCOVID (default)
 
 
21267
  type: mteb/trec-covid
21268
+ config: default
21269
+ split: test
21270
+ revision: bb9466bac8153a0349341eb1b22e06409e78ef4e
21271
  metrics:
21272
  - type: ndcg_at_1
21273
  value: 70.0
 
21551
  value: -26.309700000000003
21552
  - type: main_score
21553
  value: 68.573
21554
+ - task:
21555
  type: Retrieval
21556
+ dataset:
 
21557
  name: MTEB Touche2020 (default)
 
 
21558
  type: mteb/touche2020
21559
+ config: default
21560
+ split: test
21561
+ revision: a34f9a33db75fa0cbb21bb5cfc3dae8dc8bec93f
21562
  metrics:
21563
  - type: ndcg_at_1
21564
  value: 41.837
 
21842
  value: -19.7412
21843
  - type: main_score
21844
  value: 27.306
21845
+ - task:
21846
+ type: Classification
21847
+ dataset:
 
21848
  name: MTEB ToxicConversationsClassification (default)
 
 
21849
  type: mteb/toxic_conversations_50k
21850
+ config: default
21851
+ split: test
21852
+ revision: edfaf9da55d3dd50d43143d90c1ac476895ae6de
21853
  metrics:
21854
  - type: accuracy
21855
  value: 62.480500000000006
 
21863
  value: 10.9948
21864
  - type: main_score
21865
  value: 62.480500000000006
21866
+ - task:
21867
  type: Classification
21868
+ dataset:
 
21869
  name: MTEB TweetSentimentExtractionClassification (default)
 
 
21870
  type: mteb/tweet_sentiment_extraction
21871
+ config: default
21872
+ split: test
21873
+ revision: d604517c81ca91fe16a244d1248fc021f9ecee7a
21874
  metrics:
21875
  - type: accuracy
21876
  value: 58.3616
 
21880
  value: 57.801
21881
  - type: main_score
21882
  value: 58.3616
21883
+ - task:
21884
+ type: Clustering
21885
+ dataset:
 
21886
  name: MTEB TwentyNewsgroupsClustering (default)
 
 
21887
  type: mteb/twentynewsgroups-clustering
21888
+ config: default
21889
+ split: test
21890
+ revision: 6125ec4e24fa026cec8a478383ee943acfbd5449
21891
  metrics:
21892
  - type: v_measure
21893
  value: 38.6199
 
21895
  value: 2.3855999999999997
21896
  - type: main_score
21897
  value: 38.6199
21898
+ - task:
21899
+ type: PairClassification
21900
+ dataset:
 
21901
  name: MTEB TwitterSemEval2015 (default)
 
 
21902
  type: mteb/twittersemeval2015-pairclassification
21903
+ config: default
21904
+ split: test
21905
+ revision: 70970daeab8776df92f5ea462b6173c0b46fd2d1
21906
  metrics:
21907
  - type: similarity_accuracy
21908
  value: 82.9886
 
21986
  value: 64.4131
21987
  - type: main_score
21988
  value: 64.4131
21989
+ - task:
21990
  type: PairClassification
21991
+ dataset:
 
21992
  name: MTEB TwitterURLCorpus (default)
 
 
21993
  type: mteb/twitterurlcorpus-pairclassification
21994
+ config: default
21995
+ split: test
21996
+ revision: 8b6510b0b1fa4e4c4f879467980e9be563ec1cdf
21997
  metrics:
21998
  - type: similarity_accuracy
21999
  value: 88.95100000000001
 
22077
  value: 86.1545
22078
  - type: main_score
22079
  value: 86.1545
 
 
 
22080
  ---
22081
  # Granite-Embedding-278m-multilingual
22082
 
onnx/config.json ADDED
@@ -0,0 +1,28 @@
1
+ {
2
+ "_attn_implementation_autoset": true,
3
+ "architectures": [
4
+ "XLMRobertaModel"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "classifier_dropout": null,
9
+ "eos_token_id": 2,
10
+ "export_model_type": "transformer",
11
+ "hidden_act": "gelu",
12
+ "hidden_dropout_prob": 0.1,
13
+ "hidden_size": 768,
14
+ "initializer_range": 0.02,
15
+ "intermediate_size": 3072,
16
+ "layer_norm_eps": 1e-05,
17
+ "max_position_embeddings": 514,
18
+ "model_type": "xlm-roberta",
19
+ "num_attention_heads": 12,
20
+ "num_hidden_layers": 12,
21
+ "pad_token_id": 1,
22
+ "position_embedding_type": "absolute",
23
+ "torch_dtype": "float32",
24
+ "transformers_version": "4.51.3",
25
+ "type_vocab_size": 1,
26
+ "use_cache": true,
27
+ "vocab_size": 250002
28
+ }
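
The config above describes a standard 12-layer XLM-RoBERTa encoder (768-dim hidden states, 514 position embeddings, 250002-token vocabulary). As a rough sketch of how the ONNX export added in this PR could be exercised, the snippet below runs it with onnxruntime and mean-pools the last hidden state into sentence embeddings; the input names (`input_ids`, `attention_mask`), the local `onnx/model.onnx` path, and the pooling choice are assumptions for illustration, not taken from this diff.

```python
# Illustrative sketch only: load the added ONNX export with onnxruntime and
# produce mean-pooled sentence embeddings. Input names and the local path are
# assumptions; check session.get_inputs() for the actual export signature.
import numpy as np
import onnxruntime as ort
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ibm-granite/granite-embedding-278m-multilingual")
session = ort.InferenceSession("onnx/model.onnx")

sentences = ["first example sentence", "second example sentence"]
enc = tokenizer(sentences, padding=True, truncation=True, return_tensors="np")

outputs = session.run(
    None,
    {
        "input_ids": enc["input_ids"].astype(np.int64),
        "attention_mask": enc["attention_mask"].astype(np.int64),
    },
)
last_hidden = outputs[0]  # assumed shape: (batch, seq_len, 768)

# Mean pooling over non-padding tokens, then L2 normalisation.
mask = enc["attention_mask"][..., None].astype(np.float32)
embeddings = (last_hidden * mask).sum(axis=1) / np.clip(mask.sum(axis=1), 1e-9, None)
embeddings = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
print(embeddings.shape)  # e.g. (2, 768)
```
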
onnx/model.onnx ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4772f27fcc55b1349d7c7b71fb9f98301f55535dfed68891caf782d4b75a53f0
3
+ size 1110155573
onnx/special_tokens_map.json ADDED
@@ -0,0 +1,51 @@
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "cls_token": {
10
+ "content": "<s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "eos_token": {
17
+ "content": "</s>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "mask_token": {
24
+ "content": "<mask>",
25
+ "lstrip": true,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "pad_token": {
31
+ "content": "<pad>",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ },
37
+ "sep_token": {
38
+ "content": "</s>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false
43
+ },
44
+ "unk_token": {
45
+ "content": "<unk>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false
50
+ }
51
+ }
onnx/tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3a56def25aa40facc030ea8b0b87f3688e4b3c39eb8b45d5702b3a1300fe2a20
3
+ size 17082734
onnx/tokenizer_config.json ADDED
@@ -0,0 +1,55 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "<s>",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "<pad>",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "</s>",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "<unk>",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "250001": {
36
+ "content": "<mask>",
37
+ "lstrip": true,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "bos_token": "<s>",
45
+ "clean_up_tokenization_spaces": false,
46
+ "cls_token": "<s>",
47
+ "eos_token": "</s>",
48
+ "extra_special_tokens": {},
49
+ "mask_token": "<mask>",
50
+ "model_max_length": 512,
51
+ "pad_token": "<pad>",
52
+ "sep_token": "</s>",
53
+ "tokenizer_class": "XLMRobertaTokenizer",
54
+ "unk_token": "<unk>"
55
+ }
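
The tokenizer files added under `onnx/` carry the usual XLM-RoBERTa special tokens, with the ids declared in `added_tokens_decoder` above (0 `<s>`, 1 `<pad>`, 2 `</s>`, 3 `<unk>`, 250001 `<mask>`) and a 512-token limit. A minimal sanity-check sketch, assuming a local checkout of this repo so that the `onnx/` folder is on disk:

```python
# Sketch, assuming a local checkout: the added tokenizer files should load
# directly and report the special-token ids declared in tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("onnx")  # reads tokenizer.json, tokenizer_config.json, special_tokens_map.json
print(tok.cls_token, tok.cls_token_id)       # <s> 0
print(tok.pad_token, tok.pad_token_id)       # <pad> 1
print(tok.sep_token, tok.sep_token_id)       # </s> 2
print(tok.unk_token, tok.unk_token_id)       # <unk> 3
print(tok.mask_token, tok.mask_token_id)     # <mask> 250001
print(tok.model_max_length)                  # 512
```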