Training in progress, step 5000
- 1_Pooling/config.json +3 -3
- Information-Retrieval_evaluation_BeIR-touche2020-subset-test_results.csv +1 -0
- Information-Retrieval_evaluation_NanoArguAna_results.csv +1 -0
- Information-Retrieval_evaluation_NanoClimateFEVER_results.csv +1 -0
- Information-Retrieval_evaluation_NanoDBPedia_results.csv +1 -0
- Information-Retrieval_evaluation_NanoFEVER_results.csv +1 -0
- Information-Retrieval_evaluation_NanoFiQA2018_results.csv +1 -0
- Information-Retrieval_evaluation_NanoHotpotQA_results.csv +1 -0
- Information-Retrieval_evaluation_NanoMSMARCO_results.csv +1 -0
- Information-Retrieval_evaluation_NanoNFCorpus_results.csv +1 -0
- Information-Retrieval_evaluation_NanoNQ_results.csv +1 -0
- Information-Retrieval_evaluation_NanoQuoraRetrieval_results.csv +1 -0
- Information-Retrieval_evaluation_NanoSCIDOCS_results.csv +1 -0
- Information-Retrieval_evaluation_NanoSciFact_results.csv +1 -0
- Information-Retrieval_evaluation_NanoTouche2020_results.csv +1 -0
- NanoBEIR_evaluation_mean_results.csv +1 -0
- README.md +80 -371
- config.json +36 -15
- eval/Information-Retrieval_evaluation_NanoMSMARCO_results.csv +21 -0
- eval/Information-Retrieval_evaluation_NanoNQ_results.csv +21 -0
- eval/NanoBEIR_evaluation_mean_results.csv +21 -0
- final_metrics.json +213 -213
- model.safetensors +2 -2
- modules.json +0 -6
- special_tokens_map.json +1 -1
- tokenizer.json +2 -2
- tokenizer_config.json +900 -20
1_Pooling/config.json
CHANGED
@@ -1,7 +1,7 @@
{
-    "word_embedding_dimension":
-    "pooling_mode_cls_token":
-    "pooling_mode_mean_tokens":
+    "word_embedding_dimension": 512,
+    "pooling_mode_cls_token": true,
+    "pooling_mode_mean_tokens": false,
    "pooling_mode_max_tokens": false,
    "pooling_mode_mean_sqrt_len_tokens": false,
    "pooling_mode_weightedmean_tokens": false,
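The updated pooling configuration switches to CLS-token pooling over 512-dimensional token embeddings. A minimal sketch of assembling a model with this pooling setup in sentence-transformers (the base checkpoint and sequence length are taken from the README below; treat this as illustrative, not the exact training code):

```python
from sentence_transformers import SentenceTransformer, models

# Token embeddings from the base checkpoint named in the README (prajjwal1/bert-small, 512-dim).
word_embedding_model = models.Transformer("prajjwal1/bert-small", max_seq_length=128)

# CLS pooling, mirroring the new 1_Pooling/config.json above.
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),
    pooling_mode_cls_token=True,
    pooling_mode_mean_tokens=False,
)

model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
```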
Information-Retrieval_evaluation_BeIR-touche2020-subset-test_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.7551020408163265,0.8979591836734694,0.9387755102040817,1.0,0.7551020408163265,0.01681883497544576,0.6802721088435373,0.045273391320759554,0.6489795918367346,0.07198855467813525,0.563265306122449,0.12417651370073833,0.8328474246841594,0.6023501966395867,0.2434385293084787
-1,-1,0.7346938775510204,0.9387755102040817,0.9591836734693877,0.9795918367346939,0.7346938775510204,0.01628348794626864,0.6802721088435373,0.045158349206937544,0.6612244897959185,0.07322241057348641,0.5857142857142857,0.12952247914108056,0.8306122448979592,0.6211658380172899,0.24407616368208723
+-1,-1,0.6530612244897959,0.9591836734693877,0.9591836734693877,0.9795918367346939,0.6530612244897959,0.01432104834093062,0.727891156462585,0.048281100280382724,0.6448979591836735,0.07113270115683268,0.5795918367346938,0.1277462197359846,0.7950680272108844,0.6075766364842123,0.25739036878474303
Information-Retrieval_evaluation_NanoArguAna_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.18,0.5,0.66,0.74,0.18,0.18,0.16666666666666663,0.5,0.13200000000000003,0.66,0.07400000000000001,0.74,0.3599682539682539,0.45218312003145433,0.3658170202780539
-1,-1,0.14,0.38,0.5,0.66,0.14,0.14,0.12666666666666665,0.38,0.1,0.5,0.06600000000000002,0.66,0.300095238095238,0.38615266678375515,0.3088425567963239
+-1,-1,0.22,0.54,0.58,0.74,0.22,0.22,0.18,0.54,0.11600000000000002,0.58,0.07400000000000001,0.74,0.3815793650793651,0.4674888162177975,0.3917367299367299
Information-Retrieval_evaluation_NanoClimateFEVER_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.1,0.32,0.44,0.6,0.1,0.04333333333333333,0.11333333333333333,0.154,0.092,0.214,0.066,0.2723333333333333,0.23579365079365078,0.18832347198247595,0.13278630044723194
-1,-1,0.1,0.26,0.34,0.6,0.1,0.04666666666666666,0.08666666666666666,0.12399999999999999,0.07200000000000001,0.16899999999999998,0.066,0.2973333333333333,0.21938095238095237,0.1853895398720514,0.1267197025068282
+-1,-1,0.22,0.36,0.44,0.68,0.22,0.10166666666666666,0.12,0.1433333333333333,0.09200000000000001,0.19666666666666666,0.08,0.32233333333333336,0.3309126984126983,0.24154521021050848,0.17419838412151278
Information-Retrieval_evaluation_NanoDBPedia_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.58,0.82,0.86,0.88,0.58,0.05774032197349101,0.5266666666666666,0.13525154291594316,0.444,0.17429307751101658,0.36199999999999993,0.24557535689700863,0.7,0.46037876007701023,0.32047705412103555
-1,-1,0.68,0.8,0.86,0.88,0.68,0.08435515343632806,0.5066666666666666,0.13854094008435847,0.44800000000000006,0.17555319487559032,0.376,0.25077742902228617,0.7498333333333334,0.4897177201468777,0.34021634749539026
+-1,-1,0.62,0.78,0.84,0.92,0.62,0.07067219113244924,0.5199999999999999,0.1473605766367288,0.452,0.18535916558236945,0.364,0.27484027728017424,0.7183571428571429,0.4817256707832707,0.3555155246867996
Information-Retrieval_evaluation_NanoFEVER_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.56,0.8,0.88,0.88,0.56,0.5266666666666666,0.28,0.7533333333333333,0.184,0.8333333333333333,0.092,0.8333333333333333,0.6789999999999999,0.7016221865098926,0.6483203115492273
-1,-1,0.6,0.76,0.86,0.88,0.6,0.5466666666666666,0.2533333333333333,0.7066666666666666,0.17199999999999996,0.7966666666666665,0.088,0.8166666666666665,0.6975555555555556,0.6976146491512496,0.6474697943858455
+-1,-1,0.76,0.84,0.9,0.9,0.76,0.7066666666666666,0.29333333333333333,0.7933333333333333,0.18799999999999997,0.8533333333333333,0.09399999999999999,0.8533333333333333,0.805,0.7928392587586685,0.7629176534259828
Information-Retrieval_evaluation_NanoFiQA2018_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.24,0.38,0.42,0.52,0.24,0.13307936507936508,0.15333333333333332,0.21543650793650795,0.11600000000000002,0.2601031746031746,0.07600000000000001,0.3241587301587302,0.3178809523809524,0.2635447070179544,0.22615568818713977
-1,-1,0.26,0.42,0.46,0.58,0.26,0.11974603174603175,0.18,0.25293650793650796,0.124,0.28343650793650793,0.08399999999999999,0.37834920634920627,0.34807936507936504,0.289405411189281,0.2371668409805281
+-1,-1,0.3,0.4,0.42,0.52,0.3,0.14474603174603173,0.1733333333333333,0.2201031746031746,0.124,0.2519365079365079,0.08,0.31682539682539684,0.35405555555555557,0.2761214091575939,0.23650883511668197
Information-Retrieval_evaluation_NanoHotpotQA_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.5,0.58,0.62,0.68,0.5,0.25,0.2333333333333333,0.35,0.16399999999999998,0.41,0.094,0.47,0.5529999999999999,0.43627646197603637,0.37828612061221206
-1,-1,0.54,0.64,0.7,0.74,0.54,0.27,0.26666666666666666,0.4,0.184,0.46,0.106,0.53,0.606,0.48356059599535955,0.4167638398079239
+-1,-1,0.7,0.76,0.78,0.8,0.7,0.35,0.3733333333333333,0.56,0.244,0.61,0.13399999999999998,0.67,0.7373333333333334,0.6362341710243232,0.579012255659608
Information-Retrieval_evaluation_NanoMSMARCO_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.26,0.52,0.6,0.62,0.26,0.26,0.1733333333333333,0.52,0.12,0.6,0.062,0.62,0.40519047619047627,0.45904886208148177,0.4260102142025637
-1,-1,0.28,0.54,0.62,0.8,0.28,0.28,0.17999999999999997,0.54,0.124,0.62,0.08,0.8,0.43837301587301575,0.5241911345526384,0.4480618800320956
+-1,-1,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4386111111111111,0.5075011853031293,0.4533366047009664
Information-Retrieval_evaluation_NanoNFCorpus_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.32,0.5,0.52,0.64,0.32,0.01260685895239504,0.31333333333333335,0.03463231741620742,0.27599999999999997,0.06850002262294216,0.24399999999999994,0.10851262864104039,0.4212380952380952,0.2703690747449406,0.10029196368651581
-1,-1,0.34,0.52,0.58,0.68,0.34,0.012133063569139098,0.33333333333333326,0.05465564949455657,0.29600000000000004,0.07192795043792813,0.25,0.10822085751351866,0.44152380952380954,0.2847553576589848,0.108499059841261
+-1,-1,0.38,0.46,0.54,0.64,0.38,0.012479157217241355,0.3,0.04881894595681059,0.3,0.06922116223257517,0.276,0.10938910626227699,0.45196825396825396,0.30283246736353403,0.12231981928859673
Information-Retrieval_evaluation_NanoNQ_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.32,0.5,0.6,0.62,0.32,0.3,0.1733333333333333,0.47,0.128,0.58,0.066,0.6,0.4272222222222222,0.4619884812398348,0.42411983365963474
-1,-1,0.36,0.56,0.6,0.62,0.36,0.35,0.19333333333333333,0.53,0.128,0.58,0.066,0.6,0.4625,0.490897686812855,0.46206363135240186
+-1,-1,0.32,0.54,0.6,0.66,0.32,0.3,0.18666666666666665,0.51,0.128,0.58,0.07,0.64,0.4465,0.48687028758380874,0.4417143853257704
Information-Retrieval_evaluation_NanoQuoraRetrieval_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.94,1.0,1.0,1.0,0.94,0.8273333333333334,0.4133333333333333,0.9653333333333333,0.25999999999999995,0.9793333333333334,0.13799999999999998,0.9966666666666666,0.9666666666666667,0.9697624312418531,0.9551233766233765
-1,-1,0.92,1.0,1.0,1.0,0.92,0.8073333333333332,0.4133333333333333,0.9653333333333333,0.264,0.986,0.13399999999999998,0.99,0.96,0.960129267031932,0.945489898989899
+-1,-1,0.88,1.0,1.0,1.0,0.88,0.7773333333333332,0.41999999999999993,0.972,0.26799999999999996,0.9893333333333334,0.13599999999999998,0.9926666666666667,0.9366666666666665,0.9483612484877714,0.9296388888888888
Information-Retrieval_evaluation_NanoSCIDOCS_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.42,0.6,0.72,0.82,0.42,0.08866666666666667,0.33333333333333326,0.20866666666666664,0.272,0.2806666666666667,0.17999999999999997,0.3696666666666666,0.540047619047619,0.36082794471047336,0.2862806075456748
-1,-1,0.48,0.72,0.76,0.8,0.48,0.10166666666666666,0.3666666666666666,0.22766666666666666,0.28400000000000003,0.29266666666666663,0.19199999999999995,0.3946666666666666,0.5951904761904762,0.3895503827770311,0.3082897117657901
+-1,-1,0.46,0.68,0.84,0.9,0.46,0.09666666666666666,0.3533333333333333,0.21766666666666665,0.304,0.31266666666666665,0.18999999999999997,0.3896666666666666,0.6114444444444443,0.3893008993021786,0.3091964898288773
Information-Retrieval_evaluation_NanoSciFact_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.38,0.46,0.5,0.58,0.38,0.345,0.15999999999999998,0.43,0.10800000000000003,0.475,0.068,0.58,0.44026984126984126,0.46384622999765257,0.43257979600699104
-1,-1,0.38,0.54,0.58,0.7,0.38,0.345,0.19999999999999996,0.525,0.12800000000000003,0.565,0.08,0.7,0.4820793650793651,0.5292195947118973,0.47730440170572996
+-1,-1,0.58,0.68,0.7,0.72,0.58,0.545,0.2533333333333333,0.665,0.156,0.685,0.08199999999999999,0.71,0.6375,0.6523742480687815,0.6370166989443306
Information-Retrieval_evaluation_NanoTouche2020_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.46938775510204084,0.8367346938775511,0.9387755102040817,1.0,0.46938775510204084,0.032657982947973084,0.44897959183673464,0.09621881460341672,0.42040816326530606,0.1425551052100505,0.3346938775510204,0.22061476067159091,0.6573129251700679,0.3807140713282222,0.2698119698398041
-1,-1,0.5102040816326531,0.8163265306122449,0.8571428571428571,0.9591836734693877,0.5102040816326531,0.04030730530317779,0.45578231292517,0.10027039527564566,0.4040816326530612,0.14754618693234572,0.3428571428571428,0.2268233238254859,0.6711613216715256,0.3942611497955867,0.28013001290517386
+-1,-1,0.4489795918367347,0.7755102040816326,0.8979591836734694,0.9795918367346939,0.4489795918367347,0.03398816288797225,0.41496598639455773,0.09117793391499442,0.4326530612244897,0.15321422858378142,0.3591836734693878,0.24000922572748823,0.6308309037900873,0.3959230964031327,0.3023791135389433
NanoBEIR_evaluation_mean_results.csv
CHANGED
@@ -1,3 +1,4 @@
epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accuracy@10,cosine-Precision@1,cosine-Recall@1,cosine-Precision@3,cosine-Recall@3,cosine-Precision@5,cosine-Recall@5,cosine-Precision@10,cosine-Recall@10,cosine-MRR@10,cosine-NDCG@10,cosine-MAP@100
-1,-1,0.4053375196232339,0.601287284144427,0.6737519623233909,0.7369230769230769,0.4053375196232339,0.23516034838101724,0.26838304552590264,0.37175942432349296,0.20895447409733128,0.43675267025234743,0.1428226059654631,0.4908354981821823,0.5156608233036803,0.45145275407225244,0.3820046351353431
-1,-1,0.4300156985871272,0.6120251177394034,0.6705494505494506,0.7614756671899527,0.4300156985871272,0.24183652979907766,0.2740345368916797,0.3803900122659796,0.20985243328100472,0.4344459364242849,0.14852747252747253,0.5194490371828588,0.536290187137126,0.46960347357534615,0.39284751373578397
+-1,-1,0.47453689167974883,0.6458084772370486,0.7059968602825746,0.7830455259026687,0.47453689167974883,0.2799399135628483,0.290894819466248,0.42221492034192626,0.22558869701726847,0.4697485434104026,0.15470643642072213,0.536851077391949,0.5754430365552815,0.5060859975895767,0.4381147218048991
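The CSV files above are produced by the library's retrieval evaluators, with the NanoBEIR file holding the mean over the individual Nano datasets. A minimal sketch of re-running that evaluation on the uploaded checkpoint (the model id is a placeholder, and the two dataset names are only an example; the exact dataset list used during training is not recorded in this commit):

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import NanoBEIREvaluator

# Placeholder id: substitute the repository this commit belongs to.
model = SentenceTransformer("username/model-id")

# Reports cosine Accuracy/Precision/Recall@k, MRR@10, NDCG@10 and MAP@100 per dataset,
# plus their mean, matching the columns of the CSVs in this commit.
evaluator = NanoBEIREvaluator(dataset_names=["msmarco", "nq"])
results = evaluator(model)
print(results)
```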
README.md
CHANGED
@@ -5,231 +5,51 @@ tags:
| 5 |
- feature-extraction
|
| 6 |
- dense
|
| 7 |
- generated_from_trainer
|
| 8 |
-
- dataset_size:
|
| 9 |
- loss:MultipleNegativesRankingLoss
|
| 10 |
-
base_model:
|
| 11 |
widget:
|
| 12 |
-
- source_sentence:
|
| 13 |
sentences:
|
| 14 |
-
-
|
| 15 |
-
-
|
| 16 |
-
- What
|
| 17 |
-
- source_sentence:
|
| 18 |
-
been underestimated?
|
| 19 |
sentences:
|
| 20 |
-
- How
|
| 21 |
-
-
|
| 22 |
-
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
sentences:
|
| 26 |
-
-
|
| 27 |
-
|
| 28 |
-
-
|
| 29 |
-
|
| 30 |
-
- Are there any platforms that provides end-to-end encryption for file transfer/
|
| 31 |
-
sharing?
|
| 32 |
-
- source_sentence: Why AAP’s MLA Dinesh Mohaniya has been arrested?
|
| 33 |
sentences:
|
| 34 |
-
- What are
|
| 35 |
-
-
|
| 36 |
-
-
|
| 37 |
-
- source_sentence: What is the
|
| 38 |
sentences:
|
| 39 |
-
-
|
| 40 |
-
|
| 41 |
-
- the
|
| 42 |
-
- What is the difference between economic growth and economic development?
|
| 43 |
pipeline_tag: sentence-similarity
|
| 44 |
library_name: sentence-transformers
|
| 45 |
-
metrics:
|
| 46 |
-
- cosine_accuracy@1
|
| 47 |
-
- cosine_accuracy@3
|
| 48 |
-
- cosine_accuracy@5
|
| 49 |
-
- cosine_accuracy@10
|
| 50 |
-
- cosine_precision@1
|
| 51 |
-
- cosine_precision@3
|
| 52 |
-
- cosine_precision@5
|
| 53 |
-
- cosine_precision@10
|
| 54 |
-
- cosine_recall@1
|
| 55 |
-
- cosine_recall@3
|
| 56 |
-
- cosine_recall@5
|
| 57 |
-
- cosine_recall@10
|
| 58 |
-
- cosine_ndcg@10
|
| 59 |
-
- cosine_mrr@10
|
| 60 |
-
- cosine_map@100
|
| 61 |
-
model-index:
|
| 62 |
-
- name: SentenceTransformer based on thenlper/gte-small
|
| 63 |
-
results:
|
| 64 |
-
- task:
|
| 65 |
-
type: information-retrieval
|
| 66 |
-
name: Information Retrieval
|
| 67 |
-
dataset:
|
| 68 |
-
name: NanoMSMARCO
|
| 69 |
-
type: NanoMSMARCO
|
| 70 |
-
metrics:
|
| 71 |
-
- type: cosine_accuracy@1
|
| 72 |
-
value: 0.28
|
| 73 |
-
name: Cosine Accuracy@1
|
| 74 |
-
- type: cosine_accuracy@3
|
| 75 |
-
value: 0.58
|
| 76 |
-
name: Cosine Accuracy@3
|
| 77 |
-
- type: cosine_accuracy@5
|
| 78 |
-
value: 0.64
|
| 79 |
-
name: Cosine Accuracy@5
|
| 80 |
-
- type: cosine_accuracy@10
|
| 81 |
-
value: 0.72
|
| 82 |
-
name: Cosine Accuracy@10
|
| 83 |
-
- type: cosine_precision@1
|
| 84 |
-
value: 0.28
|
| 85 |
-
name: Cosine Precision@1
|
| 86 |
-
- type: cosine_precision@3
|
| 87 |
-
value: 0.19333333333333333
|
| 88 |
-
name: Cosine Precision@3
|
| 89 |
-
- type: cosine_precision@5
|
| 90 |
-
value: 0.128
|
| 91 |
-
name: Cosine Precision@5
|
| 92 |
-
- type: cosine_precision@10
|
| 93 |
-
value: 0.07200000000000001
|
| 94 |
-
name: Cosine Precision@10
|
| 95 |
-
- type: cosine_recall@1
|
| 96 |
-
value: 0.28
|
| 97 |
-
name: Cosine Recall@1
|
| 98 |
-
- type: cosine_recall@3
|
| 99 |
-
value: 0.58
|
| 100 |
-
name: Cosine Recall@3
|
| 101 |
-
- type: cosine_recall@5
|
| 102 |
-
value: 0.64
|
| 103 |
-
name: Cosine Recall@5
|
| 104 |
-
- type: cosine_recall@10
|
| 105 |
-
value: 0.72
|
| 106 |
-
name: Cosine Recall@10
|
| 107 |
-
- type: cosine_ndcg@10
|
| 108 |
-
value: 0.5075011853031293
|
| 109 |
-
name: Cosine Ndcg@10
|
| 110 |
-
- type: cosine_mrr@10
|
| 111 |
-
value: 0.4386111111111111
|
| 112 |
-
name: Cosine Mrr@10
|
| 113 |
-
- type: cosine_map@100
|
| 114 |
-
value: 0.4533366047009664
|
| 115 |
-
name: Cosine Map@100
|
| 116 |
-
- task:
|
| 117 |
-
type: information-retrieval
|
| 118 |
-
name: Information Retrieval
|
| 119 |
-
dataset:
|
| 120 |
-
name: NanoNQ
|
| 121 |
-
type: NanoNQ
|
| 122 |
-
metrics:
|
| 123 |
-
- type: cosine_accuracy@1
|
| 124 |
-
value: 0.32
|
| 125 |
-
name: Cosine Accuracy@1
|
| 126 |
-
- type: cosine_accuracy@3
|
| 127 |
-
value: 0.54
|
| 128 |
-
name: Cosine Accuracy@3
|
| 129 |
-
- type: cosine_accuracy@5
|
| 130 |
-
value: 0.6
|
| 131 |
-
name: Cosine Accuracy@5
|
| 132 |
-
- type: cosine_accuracy@10
|
| 133 |
-
value: 0.66
|
| 134 |
-
name: Cosine Accuracy@10
|
| 135 |
-
- type: cosine_precision@1
|
| 136 |
-
value: 0.32
|
| 137 |
-
name: Cosine Precision@1
|
| 138 |
-
- type: cosine_precision@3
|
| 139 |
-
value: 0.18666666666666665
|
| 140 |
-
name: Cosine Precision@3
|
| 141 |
-
- type: cosine_precision@5
|
| 142 |
-
value: 0.128
|
| 143 |
-
name: Cosine Precision@5
|
| 144 |
-
- type: cosine_precision@10
|
| 145 |
-
value: 0.07
|
| 146 |
-
name: Cosine Precision@10
|
| 147 |
-
- type: cosine_recall@1
|
| 148 |
-
value: 0.3
|
| 149 |
-
name: Cosine Recall@1
|
| 150 |
-
- type: cosine_recall@3
|
| 151 |
-
value: 0.51
|
| 152 |
-
name: Cosine Recall@3
|
| 153 |
-
- type: cosine_recall@5
|
| 154 |
-
value: 0.58
|
| 155 |
-
name: Cosine Recall@5
|
| 156 |
-
- type: cosine_recall@10
|
| 157 |
-
value: 0.64
|
| 158 |
-
name: Cosine Recall@10
|
| 159 |
-
- type: cosine_ndcg@10
|
| 160 |
-
value: 0.48687028758380874
|
| 161 |
-
name: Cosine Ndcg@10
|
| 162 |
-
- type: cosine_mrr@10
|
| 163 |
-
value: 0.4465
|
| 164 |
-
name: Cosine Mrr@10
|
| 165 |
-
- type: cosine_map@100
|
| 166 |
-
value: 0.44172587957864395
|
| 167 |
-
name: Cosine Map@100
|
| 168 |
-
- task:
|
| 169 |
-
type: nano-beir
|
| 170 |
-
name: Nano BEIR
|
| 171 |
-
dataset:
|
| 172 |
-
name: NanoBEIR mean
|
| 173 |
-
type: NanoBEIR_mean
|
| 174 |
-
metrics:
|
| 175 |
-
- type: cosine_accuracy@1
|
| 176 |
-
value: 0.30000000000000004
|
| 177 |
-
name: Cosine Accuracy@1
|
| 178 |
-
- type: cosine_accuracy@3
|
| 179 |
-
value: 0.56
|
| 180 |
-
name: Cosine Accuracy@3
|
| 181 |
-
- type: cosine_accuracy@5
|
| 182 |
-
value: 0.62
|
| 183 |
-
name: Cosine Accuracy@5
|
| 184 |
-
- type: cosine_accuracy@10
|
| 185 |
-
value: 0.69
|
| 186 |
-
name: Cosine Accuracy@10
|
| 187 |
-
- type: cosine_precision@1
|
| 188 |
-
value: 0.30000000000000004
|
| 189 |
-
name: Cosine Precision@1
|
| 190 |
-
- type: cosine_precision@3
|
| 191 |
-
value: 0.19
|
| 192 |
-
name: Cosine Precision@3
|
| 193 |
-
- type: cosine_precision@5
|
| 194 |
-
value: 0.128
|
| 195 |
-
name: Cosine Precision@5
|
| 196 |
-
- type: cosine_precision@10
|
| 197 |
-
value: 0.07100000000000001
|
| 198 |
-
name: Cosine Precision@10
|
| 199 |
-
- type: cosine_recall@1
|
| 200 |
-
value: 0.29000000000000004
|
| 201 |
-
name: Cosine Recall@1
|
| 202 |
-
- type: cosine_recall@3
|
| 203 |
-
value: 0.5449999999999999
|
| 204 |
-
name: Cosine Recall@3
|
| 205 |
-
- type: cosine_recall@5
|
| 206 |
-
value: 0.61
|
| 207 |
-
name: Cosine Recall@5
|
| 208 |
-
- type: cosine_recall@10
|
| 209 |
-
value: 0.6799999999999999
|
| 210 |
-
name: Cosine Recall@10
|
| 211 |
-
- type: cosine_ndcg@10
|
| 212 |
-
value: 0.497185736443469
|
| 213 |
-
name: Cosine Ndcg@10
|
| 214 |
-
- type: cosine_mrr@10
|
| 215 |
-
value: 0.4425555555555556
|
| 216 |
-
name: Cosine Mrr@10
|
| 217 |
-
- type: cosine_map@100
|
| 218 |
-
value: 0.44753124213980516
|
| 219 |
-
name: Cosine Map@100
|
| 220 |
---
|
| 221 |
|
| 222 |
-
# SentenceTransformer based on
|
| 223 |
|
| 224 |
-
This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [
|
| 225 |
|
| 226 |
## Model Details
|
| 227 |
|
| 228 |
### Model Description
|
| 229 |
- **Model Type:** Sentence Transformer
|
| 230 |
-
- **Base model:** [
|
| 231 |
- **Maximum Sequence Length:** 128 tokens
|
| 232 |
-
- **Output Dimensionality:**
|
| 233 |
- **Similarity Function:** Cosine Similarity
|
| 234 |
<!-- - **Training Dataset:** Unknown -->
|
| 235 |
<!-- - **Language:** Unknown -->
|
|
@@ -246,8 +66,7 @@ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [t
|
|
| 246 |
```
|
| 247 |
SentenceTransformer(
|
| 248 |
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False, 'architecture': 'BertModel'})
|
| 249 |
-
(1): Pooling({'word_embedding_dimension':
|
| 250 |
-
(2): Normalize()
|
| 251 |
)
|
| 252 |
```
|
| 253 |
|
|
@@ -266,23 +85,23 @@ Then you can load this model and run inference.
|
|
| 266 |
from sentence_transformers import SentenceTransformer
|
| 267 |
|
| 268 |
# Download from the 🤗 Hub
|
| 269 |
-
model = SentenceTransformer("
|
| 270 |
# Run inference
|
| 271 |
sentences = [
|
| 272 |
-
'What is the
|
| 273 |
-
'
|
| 274 |
-
'
|
| 275 |
]
|
| 276 |
embeddings = model.encode(sentences)
|
| 277 |
print(embeddings.shape)
|
| 278 |
-
# [3,
|
| 279 |
|
| 280 |
# Get the similarity scores for the embeddings
|
| 281 |
similarities = model.similarity(embeddings, embeddings)
|
| 282 |
print(similarities)
|
| 283 |
-
# tensor([[
|
| 284 |
-
# [
|
| 285 |
-
# [
|
| 286 |
```
|
| 287 |
|
| 288 |
<!--
|
|
@@ -309,65 +128,6 @@ You can finetune this model on your own dataset.
|
|
| 309 |
*List how the model may foreseeably be misused and address what users ought not to do with the model.*
|
| 310 |
-->
|
| 311 |
|
| 312 |
-
## Evaluation
|
| 313 |
-
|
| 314 |
-
### Metrics
|
| 315 |
-
|
| 316 |
-
#### Information Retrieval
|
| 317 |
-
|
| 318 |
-
* Datasets: `NanoMSMARCO` and `NanoNQ`
|
| 319 |
-
* Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)
|
| 320 |
-
|
| 321 |
-
| Metric | NanoMSMARCO | NanoNQ |
|
| 322 |
-
|:--------------------|:------------|:-----------|
|
| 323 |
-
| cosine_accuracy@1 | 0.28 | 0.32 |
|
| 324 |
-
| cosine_accuracy@3 | 0.58 | 0.54 |
|
| 325 |
-
| cosine_accuracy@5 | 0.64 | 0.6 |
|
| 326 |
-
| cosine_accuracy@10 | 0.72 | 0.66 |
|
| 327 |
-
| cosine_precision@1 | 0.28 | 0.32 |
|
| 328 |
-
| cosine_precision@3 | 0.1933 | 0.1867 |
|
| 329 |
-
| cosine_precision@5 | 0.128 | 0.128 |
|
| 330 |
-
| cosine_precision@10 | 0.072 | 0.07 |
|
| 331 |
-
| cosine_recall@1 | 0.28 | 0.3 |
|
| 332 |
-
| cosine_recall@3 | 0.58 | 0.51 |
|
| 333 |
-
| cosine_recall@5 | 0.64 | 0.58 |
|
| 334 |
-
| cosine_recall@10 | 0.72 | 0.64 |
|
| 335 |
-
| **cosine_ndcg@10** | **0.5075** | **0.4869** |
|
| 336 |
-
| cosine_mrr@10 | 0.4386 | 0.4465 |
|
| 337 |
-
| cosine_map@100 | 0.4533 | 0.4417 |
|
| 338 |
-
|
| 339 |
-
#### Nano BEIR
|
| 340 |
-
|
| 341 |
-
* Dataset: `NanoBEIR_mean`
|
| 342 |
-
* Evaluated with [<code>NanoBEIREvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.NanoBEIREvaluator) with these parameters:
|
| 343 |
-
```json
|
| 344 |
-
{
|
| 345 |
-
"dataset_names": [
|
| 346 |
-
"msmarco",
|
| 347 |
-
"nq"
|
| 348 |
-
],
|
| 349 |
-
"dataset_id": "lightonai/NanoBEIR-en"
|
| 350 |
-
}
|
| 351 |
-
```
|
| 352 |
-
|
| 353 |
-
| Metric | Value |
|
| 354 |
-
|:--------------------|:-----------|
|
| 355 |
-
| cosine_accuracy@1 | 0.3 |
|
| 356 |
-
| cosine_accuracy@3 | 0.56 |
|
| 357 |
-
| cosine_accuracy@5 | 0.62 |
|
| 358 |
-
| cosine_accuracy@10 | 0.69 |
|
| 359 |
-
| cosine_precision@1 | 0.3 |
|
| 360 |
-
| cosine_precision@3 | 0.19 |
|
| 361 |
-
| cosine_precision@5 | 0.128 |
|
| 362 |
-
| cosine_precision@10 | 0.071 |
|
| 363 |
-
| cosine_recall@1 | 0.29 |
|
| 364 |
-
| cosine_recall@3 | 0.545 |
|
| 365 |
-
| cosine_recall@5 | 0.61 |
|
| 366 |
-
| cosine_recall@10 | 0.68 |
|
| 367 |
-
| **cosine_ndcg@10** | **0.4972** |
|
| 368 |
-
| cosine_mrr@10 | 0.4426 |
|
| 369 |
-
| cosine_map@100 | 0.4475 |
|
| 370 |
-
|
| 371 |
<!--
|
| 372 |
## Bias, Risks and Limitations
|
| 373 |
|
|
@@ -386,49 +146,23 @@ You can finetune this model on your own dataset.
|
|
| 386 |
|
| 387 |
#### Unnamed Dataset
|
| 388 |
|
| 389 |
-
* Size:
|
| 390 |
-
* Columns: <code>
|
| 391 |
-
* Approximate statistics based on the first 1000 samples:
|
| 392 |
-
| | anchor | positive | negative |
|
| 393 |
-
|:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
|
| 394 |
-
| type | string | string | string |
|
| 395 |
-
| details | <ul><li>min: 6 tokens</li><li>mean: 16.07 tokens</li><li>max: 53 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.03 tokens</li><li>max: 53 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.81 tokens</li><li>max: 58 tokens</li></ul> |
|
| 396 |
-
* Samples:
|
| 397 |
-
| anchor | positive | negative |
|
| 398 |
-
|:-------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------|
|
| 399 |
-
| <code>Which one is better Linux OS? Ubuntu or Mint?</code> | <code>Why do you use Linux Mint?</code> | <code>Which one is not better Linux OS ? Ubuntu or Mint ?</code> |
|
| 400 |
-
| <code>What is flow?</code> | <code>What is flow?</code> | <code>What are flow lines?</code> |
|
| 401 |
-
| <code>How is Trump planning to get Mexico to pay for his supposed wall?</code> | <code>How is it possible for Donald Trump to force Mexico to pay for the wall?</code> | <code>Why do we connect the positive terminal before the negative terminal to ground in a vehicle battery?</code> |
|
| 402 |
-
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
|
| 403 |
-
```json
|
| 404 |
-
{
|
| 405 |
-
"scale": 7.0,
|
| 406 |
-
"similarity_fct": "cos_sim",
|
| 407 |
-
"gather_across_devices": false
|
| 408 |
-
}
|
| 409 |
-
```
|
| 410 |
-
|
| 411 |
-
### Evaluation Dataset
|
| 412 |
-
|
| 413 |
-
#### Unnamed Dataset
|
| 414 |
-
|
| 415 |
-
* Size: 40,000 evaluation samples
|
| 416 |
-
* Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
|
| 417 |
* Approximate statistics based on the first 1000 samples:
|
| 418 |
-
| |
|
| 419 |
|:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
|
| 420 |
| type | string | string | string |
|
| 421 |
-
| details | <ul><li>min: 6 tokens</li><li>mean: 15.
|
| 422 |
* Samples:
|
| 423 |
-
|
|
| 424 |
-
|
| 425 |
-
| <code>
|
| 426 |
-
| <code>
|
| 427 |
-
| <code>
|
| 428 |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
|
| 429 |
```json
|
| 430 |
{
|
| 431 |
-
"scale":
|
| 432 |
"similarity_fct": "cos_sim",
|
| 433 |
"gather_across_devices": false
|
| 434 |
}
|
|
@@ -437,49 +171,36 @@ You can finetune this model on your own dataset.
|
|
| 437 |
### Training Hyperparameters
|
| 438 |
#### Non-Default Hyperparameters
|
| 439 |
|
| 440 |
-
- `
|
| 441 |
-
- `
|
| 442 |
-
- `per_device_eval_batch_size`: 128
|
| 443 |
-
- `learning_rate`: 2e-05
|
| 444 |
-
- `weight_decay`: 0.0001
|
| 445 |
-
- `max_steps`: 5000
|
| 446 |
-
- `warmup_ratio`: 0.1
|
| 447 |
- `fp16`: True
|
| 448 |
-
- `
|
| 449 |
-
- `dataloader_num_workers`: 1
|
| 450 |
-
- `dataloader_prefetch_factor`: 1
|
| 451 |
-
- `load_best_model_at_end`: True
|
| 452 |
-
- `optim`: adamw_torch
|
| 453 |
-
- `ddp_find_unused_parameters`: False
|
| 454 |
-
- `push_to_hub`: True
|
| 455 |
-
- `hub_model_id`: redis/model-b-structured
|
| 456 |
-
- `eval_on_start`: True
|
| 457 |
|
| 458 |
#### All Hyperparameters
|
| 459 |
<details><summary>Click to expand</summary>
|
| 460 |
|
| 461 |
- `overwrite_output_dir`: False
|
| 462 |
- `do_predict`: False
|
| 463 |
-
- `eval_strategy`:
|
| 464 |
- `prediction_loss_only`: True
|
| 465 |
-
- `per_device_train_batch_size`:
|
| 466 |
-
- `per_device_eval_batch_size`:
|
| 467 |
- `per_gpu_train_batch_size`: None
|
| 468 |
- `per_gpu_eval_batch_size`: None
|
| 469 |
- `gradient_accumulation_steps`: 1
|
| 470 |
- `eval_accumulation_steps`: None
|
| 471 |
- `torch_empty_cache_steps`: None
|
| 472 |
-
- `learning_rate`:
|
| 473 |
-
- `weight_decay`: 0.
|
| 474 |
- `adam_beta1`: 0.9
|
| 475 |
- `adam_beta2`: 0.999
|
| 476 |
- `adam_epsilon`: 1e-08
|
| 477 |
-
- `max_grad_norm`: 1
|
| 478 |
-
- `num_train_epochs`: 3
|
| 479 |
-
- `max_steps`:
|
| 480 |
- `lr_scheduler_type`: linear
|
| 481 |
- `lr_scheduler_kwargs`: {}
|
| 482 |
-
- `warmup_ratio`: 0.
|
| 483 |
- `warmup_steps`: 0
|
| 484 |
- `log_level`: passive
|
| 485 |
- `log_level_replica`: warning
|
|
@@ -507,14 +228,14 @@ You can finetune this model on your own dataset.
|
|
| 507 |
- `tpu_num_cores`: None
|
| 508 |
- `tpu_metrics_debug`: False
|
| 509 |
- `debug`: []
|
| 510 |
-
- `dataloader_drop_last`:
|
| 511 |
-
- `dataloader_num_workers`:
|
| 512 |
-
- `dataloader_prefetch_factor`:
|
| 513 |
- `past_index`: -1
|
| 514 |
- `disable_tqdm`: False
|
| 515 |
- `remove_unused_columns`: True
|
| 516 |
- `label_names`: None
|
| 517 |
-
- `load_best_model_at_end`:
|
| 518 |
- `ignore_data_skip`: False
|
| 519 |
- `fsdp`: []
|
| 520 |
- `fsdp_min_num_params`: 0
|
|
@@ -524,23 +245,23 @@ You can finetune this model on your own dataset.
|
|
| 524 |
- `parallelism_config`: None
|
| 525 |
- `deepspeed`: None
|
| 526 |
- `label_smoothing_factor`: 0.0
|
| 527 |
-
- `optim`:
|
| 528 |
- `optim_args`: None
|
| 529 |
- `adafactor`: False
|
| 530 |
- `group_by_length`: False
|
| 531 |
- `length_column_name`: length
|
| 532 |
- `project`: huggingface
|
| 533 |
- `trackio_space_id`: trackio
|
| 534 |
-
- `ddp_find_unused_parameters`:
|
| 535 |
- `ddp_bucket_cap_mb`: None
|
| 536 |
- `ddp_broadcast_buffers`: False
|
| 537 |
- `dataloader_pin_memory`: True
|
| 538 |
- `dataloader_persistent_workers`: False
|
| 539 |
- `skip_memory_metrics`: True
|
| 540 |
- `use_legacy_prediction_loop`: False
|
| 541 |
-
- `push_to_hub`:
|
| 542 |
- `resume_from_checkpoint`: None
|
| 543 |
-
- `hub_model_id`:
|
| 544 |
- `hub_strategy`: every_save
|
| 545 |
- `hub_private_repo`: None
|
| 546 |
- `hub_always_push`: False
|
|
@@ -567,43 +288,31 @@ You can finetune this model on your own dataset.
|
|
| 567 |
- `neftune_noise_alpha`: None
|
| 568 |
- `optim_target_modules`: None
|
| 569 |
- `batch_eval_metrics`: False
|
| 570 |
-
- `eval_on_start`:
|
| 571 |
- `use_liger_kernel`: False
|
| 572 |
- `liger_kernel_config`: None
|
| 573 |
- `eval_use_gather_object`: False
|
| 574 |
- `average_tokens_across_devices`: True
|
| 575 |
- `prompts`: None
|
| 576 |
- `batch_sampler`: batch_sampler
|
| 577 |
-
- `multi_dataset_batch_sampler`:
|
| 578 |
- `router_mapping`: {}
|
| 579 |
- `learning_rate_mapping`: {}
|
| 580 |
|
| 581 |
</details>
|
| 582 |
|
| 583 |
### Training Logs
|
| 584 |
-
| Epoch | Step | Training Loss |
|
| 585 |
-
|
| 586 |
-
| 0
|
| 587 |
-
| 0.
|
| 588 |
-
| 0.
|
| 589 |
-
|
|
| 590 |
-
|
|
| 591 |
-
|
|
| 592 |
-
|
|
| 593 |
-
|
|
| 594 |
-
|
|
| 595 |
-
| 0.4035 | 2250 | 0.4752 | 0.3785 | 0.5105 | 0.5292 | 0.5198 |
|
| 596 |
-
| 0.4484 | 2500 | 0.4707 | 0.3758 | 0.5208 | 0.4986 | 0.5097 |
|
| 597 |
-
| 0.4932 | 2750 | 0.4646 | 0.3733 | 0.5182 | 0.5016 | 0.5099 |
|
| 598 |
-
| 0.5380 | 3000 | 0.4636 | 0.3713 | 0.5127 | 0.4969 | 0.5048 |
|
| 599 |
-
| 0.5829 | 3250 | 0.4602 | 0.3693 | 0.5112 | 0.4869 | 0.4991 |
|
| 600 |
-
| 0.6277 | 3500 | 0.4597 | 0.3678 | 0.5170 | 0.5000 | 0.5085 |
|
| 601 |
-
| 0.6725 | 3750 | 0.4555 | 0.3665 | 0.5127 | 0.4899 | 0.5013 |
|
| 602 |
-
| 0.7174 | 4000 | 0.4541 | 0.3661 | 0.5130 | 0.4869 | 0.5000 |
|
| 603 |
-
| 0.7622 | 4250 | 0.4528 | 0.3649 | 0.5078 | 0.4887 | 0.4982 |
|
| 604 |
-
| 0.8070 | 4500 | 0.4495 | 0.3643 | 0.5073 | 0.4867 | 0.4970 |
|
| 605 |
-
| 0.8519 | 4750 | 0.4524 | 0.3640 | 0.5049 | 0.4875 | 0.4962 |
|
| 606 |
-
| 0.8967 | 5000 | 0.4516 | 0.3637 | 0.5075 | 0.4869 | 0.4972 |
|
| 607 |
|
| 608 |
|
| 609 |
### Framework Versions
|
|
@@ -612,7 +321,7 @@ You can finetune this model on your own dataset.
|
|
| 612 |
- Transformers: 4.57.3
|
| 613 |
- PyTorch: 2.9.1+cu128
|
| 614 |
- Accelerate: 1.12.0
|
| 615 |
-
- Datasets:
|
| 616 |
- Tokenizers: 0.22.1
|
| 617 |
|
| 618 |
## Citation

- feature-extraction
- dense
- generated_from_trainer
+- dataset_size:100000
- loss:MultipleNegativesRankingLoss
+base_model: prajjwal1/bert-small
widget:
+- source_sentence: How do I calculate IQ?
  sentences:
+  - What is the easiest way to know my IQ?
+  - How do I calculate not IQ ?
+  - What are some creative and innovative business ideas with less investment in India?
+- source_sentence: How can I learn martial arts in my home?
  sentences:
+  - How can I learn martial arts by myself?
+  - What are the advantages and disadvantages of investing in gold?
+  - Can people see that I have looked at their pictures on instagram if I am not following
+    them?
+- source_sentence: When Enterprise picks you up do you have to take them back?
  sentences:
+  - Are there any software Training institute in Tuticorin?
+  - When Enterprise picks you up do you have to take them back?
+  - When Enterprise picks you up do them have to take youback?
+- source_sentence: What are some non-capital goods?
  sentences:
+  - What are capital goods?
+  - How is the value of [math]\pi[/math] calculated?
+  - What are some non-capital goods?
+- source_sentence: What is the QuickBooks technical support phone number in New York?
  sentences:
+  - What caused the Great Depression?
+  - Can I apply for PR in Canada?
+  - Which is the best QuickBooks Hosting Support Number in New York?
pipeline_tag: sentence-similarity
library_name: sentence-transformers
---

+# SentenceTransformer based on prajjwal1/bert-small

+This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [prajjwal1/bert-small](https://huggingface.co/prajjwal1/bert-small). It maps sentences & paragraphs to a 512-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
+- **Base model:** [prajjwal1/bert-small](https://huggingface.co/prajjwal1/bert-small) <!-- at revision 0ec5f86f27c1a77d704439db5e01c307ea11b9d4 -->
- **Maximum Sequence Length:** 128 tokens
+- **Output Dimensionality:** 512 dimensions
- **Similarity Function:** Cosine Similarity
<!-- - **Training Dataset:** Unknown -->
<!-- - **Language:** Unknown -->
```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 128, 'do_lower_case': False, 'architecture': 'BertModel'})
+  (1): Pooling({'word_embedding_dimension': 512, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```

from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
+model = SentenceTransformer("sentence_transformers_model_id")
# Run inference
sentences = [
+    'What is the QuickBooks technical support phone number in New York?',
+    'Which is the best QuickBooks Hosting Support Number in New York?',
+    'Can I apply for PR in Canada?',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
+# [3, 512]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities)
+# tensor([[1.0000, 0.8563, 0.0594],
+# [0.8563, 1.0000, 0.1245],
+# [0.0594, 0.1245, 1.0000]])
```

<!--
*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

<!--
## Bias, Risks and Limitations


#### Unnamed Dataset

+* Size: 100,000 training samples
+* Columns: <code>sentence_0</code>, <code>sentence_1</code>, and <code>sentence_2</code>
* Approximate statistics based on the first 1000 samples:
+  | | sentence_0 | sentence_1 | sentence_2 |
  |:--------|:-----------|:-----------|:-----------|
  | type | string | string | string |
+  | details | <ul><li>min: 6 tokens</li><li>mean: 15.79 tokens</li><li>max: 66 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.68 tokens</li><li>max: 66 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 16.37 tokens</li><li>max: 67 tokens</li></ul> |
* Samples:
+  | sentence_0 | sentence_1 | sentence_2 |
+  |:-----------|:-----------|:-----------|
+  | <code>Is masturbating bad for boys?</code> | <code>Is masturbating bad for boys?</code> | <code>How harmful or unhealthy is masturbation?</code> |
+  | <code>Does a train engine move in reverse?</code> | <code>Does a train engine move in reverse?</code> | <code>Time moves forward, not in reverse. Doesn't that make time a vector?</code> |
+  | <code>What is the most badass thing anyone has ever done?</code> | <code>What is the most badass thing anyone has ever done?</code> | <code>anyone is the most badass thing Whathas ever done?</code> |
* Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
+      "scale": 20.0,
      "similarity_fct": "cos_sim",
      "gather_across_devices": false
  }
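The loss parameters above correspond to the library's in-batch-negatives objective. A minimal sketch of wiring that same loss configuration into a trainer (the toy dataset rows are illustrative, and loading the base checkpoint directly gives the library's default mean pooling rather than the CLS pooling recorded in 1_Pooling/config.json above):

```python
from datasets import Dataset
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses

# Toy (sentence_0, sentence_1, sentence_2) triplets shaped like the samples above.
train_dataset = Dataset.from_dict({
    "sentence_0": ["How do I calculate IQ?"],
    "sentence_1": ["What is the easiest way to know my IQ?"],
    "sentence_2": ["How do I calculate not IQ ?"],
})

model = SentenceTransformer("prajjwal1/bert-small")

# Same loss configuration as reported above: cosine similarity scaled by 20.
loss = losses.MultipleNegativesRankingLoss(model, scale=20.0)

trainer = SentenceTransformerTrainer(model=model, train_dataset=train_dataset, loss=loss)
trainer.train()
```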
### Training Hyperparameters
#### Non-Default Hyperparameters

+- `per_device_train_batch_size`: 64
+- `per_device_eval_batch_size`: 64
- `fp16`: True
+- `multi_dataset_batch_sampler`: round_robin
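For context, the non-default options above map directly onto the trainer's argument object. A minimal sketch (the output directory is a placeholder):

```python
from sentence_transformers import SentenceTransformerTrainingArguments

args = SentenceTransformerTrainingArguments(
    output_dir="outputs",                       # placeholder
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    fp16=True,
    multi_dataset_batch_sampler="round_robin",  # only relevant when training on multiple datasets
)
```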

#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
+- `eval_strategy`: no
- `prediction_loss_only`: True
+- `per_device_train_batch_size`: 64
+- `per_device_eval_batch_size`: 64
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
+- `learning_rate`: 5e-05
+- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
+- `max_grad_norm`: 1
+- `num_train_epochs`: 3
+- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
+- `warmup_ratio`: 0.0
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
| 228 |
- `tpu_num_cores`: None
|
| 229 |
- `tpu_metrics_debug`: False
|
| 230 |
- `debug`: []
|
| 231 |
+
- `dataloader_drop_last`: False
|
| 232 |
+
- `dataloader_num_workers`: 0
|
| 233 |
+
- `dataloader_prefetch_factor`: None
|
| 234 |
- `past_index`: -1
|
| 235 |
- `disable_tqdm`: False
|
| 236 |
- `remove_unused_columns`: True
|
| 237 |
- `label_names`: None
|
| 238 |
+
- `load_best_model_at_end`: False
|
| 239 |
- `ignore_data_skip`: False
|
| 240 |
- `fsdp`: []
|
| 241 |
- `fsdp_min_num_params`: 0
|
|
|
|
| 245 |
- `parallelism_config`: None
|
| 246 |
- `deepspeed`: None
|
| 247 |
- `label_smoothing_factor`: 0.0
|
| 248 |
+
- `optim`: adamw_torch_fused
|
| 249 |
- `optim_args`: None
|
| 250 |
- `adafactor`: False
|
| 251 |
- `group_by_length`: False
|
| 252 |
- `length_column_name`: length
|
| 253 |
- `project`: huggingface
|
| 254 |
- `trackio_space_id`: trackio
|
| 255 |
+
- `ddp_find_unused_parameters`: None
|
| 256 |
- `ddp_bucket_cap_mb`: None
|
| 257 |
- `ddp_broadcast_buffers`: False
|
| 258 |
- `dataloader_pin_memory`: True
|
| 259 |
- `dataloader_persistent_workers`: False
|
| 260 |
- `skip_memory_metrics`: True
|
| 261 |
- `use_legacy_prediction_loop`: False
|
| 262 |
+
- `push_to_hub`: False
|
| 263 |
- `resume_from_checkpoint`: None
|
| 264 |
+
- `hub_model_id`: None
|
| 265 |
- `hub_strategy`: every_save
|
| 266 |
- `hub_private_repo`: None
|
| 267 |
- `hub_always_push`: False
|
|
|
|
| 288 |
- `neftune_noise_alpha`: None
|
| 289 |
- `optim_target_modules`: None
|
| 290 |
- `batch_eval_metrics`: False
|
| 291 |
+
- `eval_on_start`: False
|
| 292 |
- `use_liger_kernel`: False
|
| 293 |
- `liger_kernel_config`: None
|
| 294 |
- `eval_use_gather_object`: False
|
| 295 |
- `average_tokens_across_devices`: True
|
| 296 |
- `prompts`: None
|
| 297 |
- `batch_sampler`: batch_sampler
|
| 298 |
+
- `multi_dataset_batch_sampler`: round_robin
|
| 299 |
- `router_mapping`: {}
|
| 300 |
- `learning_rate_mapping`: {}
|
| 301 |
|
| 302 |
</details>
|
| 303 |
|
| 304 |
### Training Logs

| Epoch  | Step | Training Loss |
|:------:|:----:|:-------------:|
| 0.3199 | 500  | 0.4294        |
| 0.6398 | 1000 | 0.1268        |
| 0.9597 | 1500 | 0.1           |
| 1.2796 | 2000 | 0.0792        |
| 1.5995 | 2500 | 0.0706        |
| 1.9194 | 3000 | 0.0687        |
| 2.2393 | 3500 | 0.0584        |
| 2.5592 | 4000 | 0.057         |
| 2.8791 | 4500 | 0.0581        |

### Framework Versions

- Transformers: 4.57.3
- PyTorch: 2.9.1+cu128
- Accelerate: 1.12.0
- Datasets: 4.4.2
- Tokenizers: 0.22.1

## Citation

config.json
CHANGED

@@ -1,24 +1,45 @@
 {
   "architectures": [
-    "
+    "ModernBertModel"
   ],
-  "
-  "
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 50281,
+  "classifier_activation": "gelu",
+  "classifier_bias": false,
+  "classifier_dropout": 0.0,
+  "classifier_pooling": "mean",
+  "cls_token_id": 50281,
+  "decoder_bias": true,
+  "deterministic_flash_attn": false,
   "dtype": "float32",
-  "
-  "
-  "
+  "embedding_dropout": 0.0,
+  "eos_token_id": 50282,
+  "global_attn_every_n_layers": 3,
+  "global_rope_theta": 160000.0,
+  "gradient_checkpointing": false,
+  "hidden_activation": "gelu",
+  "hidden_size": 768,
+  "initializer_cutoff_factor": 2.0,
   "initializer_range": 0.02,
-  "intermediate_size": 
-  "layer_norm_eps": 1e-
-  "
-  "
+  "intermediate_size": 1152,
+  "layer_norm_eps": 1e-05,
+  "local_attention": 128,
+  "local_rope_theta": 10000.0,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "mlp_dropout": 0.0,
+  "model_type": "modernbert",
+  "norm_bias": false,
+  "norm_eps": 1e-05,
   "num_attention_heads": 12,
-  "num_hidden_layers": 
-  "pad_token_id": 
+  "num_hidden_layers": 22,
+  "pad_token_id": 50283,
   "position_embedding_type": "absolute",
+  "repad_logits_with_grad": false,
+  "sep_token_id": 50282,
+  "sparse_pred_ignore_index": -100,
+  "sparse_prediction": false,
   "transformers_version": "4.57.3",
-  "
-  "use_cache": true,
-  "vocab_size": 30522
+  "vocab_size": 50368
 }
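Since the backbone configuration now describes a ModernBERT encoder, a quick hedged way to confirm the swap after downloading the checkpoint (the repo id below is a placeholder):

```python
# Sketch: inspect the backbone config shipped with this commit.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("path/or/repo-id")  # placeholder
# Per the diff above, this should report "modernbert", 8192 positions and a 50368-token vocab.
print(cfg.model_type, cfg.max_position_embeddings, cfg.vocab_size)
```
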
eval/Information-Retrieval_evaluation_NanoMSMARCO_results.csv
CHANGED
|
@@ -62,3 +62,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accurac
|
|
| 62 |
0.8070301291248206,4500,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4383888888888889,0.5072618819162075,0.45318728603379055
|
| 63 |
0.8518651362984218,4750,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4353333333333333,0.5049319844672133,0.4499328092290969
|
| 64 |
0.896700143472023,5000,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4386111111111111,0.5075011853031293,0.4533366047009664
|
| 62 |
0.8070301291248206,4500,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4383888888888889,0.5072618819162075,0.45318728603379055
|
| 63 |
0.8518651362984218,4750,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4353333333333333,0.5049319844672133,0.4499328092290969
|
| 64 |
0.896700143472023,5000,0.28,0.58,0.64,0.72,0.28,0.28,0.19333333333333333,0.58,0.128,0.64,0.07200000000000001,0.72,0.4386111111111111,0.5075011853031293,0.4533366047009664
|
| 65 |
+
0,0,0.44,0.72,0.76,0.86,0.44,0.44,0.24,0.72,0.15200000000000002,0.76,0.08599999999999998,0.86,0.5872222222222222,0.6530251712549636,0.5942230199326107
|
| 66 |
+
0.04483500717360115,250,0.5,0.66,0.74,0.86,0.5,0.5,0.22,0.66,0.14800000000000002,0.74,0.08599999999999998,0.86,0.6009444444444444,0.6614885369108103,0.6057446524064171
|
| 67 |
+
0.0896700143472023,500,0.38,0.64,0.7,0.84,0.38,0.38,0.21333333333333332,0.64,0.14,0.7,0.08399999999999999,0.84,0.5303571428571429,0.6042167983758794,0.5350048266983858
|
| 68 |
+
0.13450502152080343,750,0.34,0.62,0.72,0.84,0.34,0.34,0.20666666666666664,0.62,0.14400000000000002,0.72,0.08399999999999999,0.84,0.4998253968253968,0.5818862869894319,0.5054932208877442
|
| 69 |
+
0.1793400286944046,1000,0.36,0.64,0.74,0.84,0.36,0.36,0.21333333333333332,0.64,0.14800000000000002,0.74,0.08399999999999999,0.84,0.520079365079365,0.5975989120072726,0.5259964429127448
|
| 70 |
+
0.22417503586800575,1250,0.38,0.6,0.7,0.82,0.38,0.38,0.2,0.6,0.14,0.7,0.08199999999999999,0.82,0.5175714285714286,0.5900845581022193,0.5253970322872762
|
| 71 |
+
0.26901004304160686,1500,0.38,0.6,0.74,0.88,0.38,0.38,0.2,0.6,0.14800000000000002,0.74,0.08799999999999997,0.88,0.5306904761904763,0.6137718247368406,0.5338962921488117
|
| 72 |
+
0.31384505021520803,1750,0.44,0.64,0.72,0.82,0.44,0.44,0.21333333333333332,0.64,0.14400000000000002,0.72,0.08199999999999999,0.82,0.5582380952380952,0.6210313401123215,0.5661495407150169
|
| 73 |
+
0.3586800573888092,2000,0.42,0.62,0.74,0.84,0.42,0.42,0.20666666666666667,0.62,0.14800000000000002,0.74,0.08399999999999999,0.84,0.5431111111111111,0.6139286759157707,0.5496822129079311
|
| 74 |
+
0.4035150645624103,2250,0.42,0.64,0.72,0.86,0.42,0.42,0.21333333333333332,0.64,0.14400000000000002,0.72,0.08599999999999998,0.86,0.5545238095238095,0.6274703296032564,0.5598875379006958
|
| 75 |
+
0.4483500717360115,2500,0.4,0.64,0.7,0.84,0.4,0.4,0.21333333333333332,0.64,0.14,0.7,0.08399999999999999,0.84,0.5326269841269842,0.6059110266137139,0.5390528582528582
|
| 76 |
+
0.4931850789096126,2750,0.4,0.6,0.7,0.84,0.4,0.4,0.2,0.6,0.14,0.7,0.08399999999999999,0.84,0.5269603174603174,0.6010982937060065,0.5333106058525309
|
| 77 |
+
0.5380200860832137,3000,0.38,0.62,0.68,0.8,0.38,0.38,0.20666666666666667,0.62,0.136,0.68,0.08,0.8,0.5127698412698413,0.5815761098213873,0.5217435726816532
|
| 78 |
+
0.582855093256815,3250,0.38,0.62,0.68,0.84,0.38,0.38,0.20666666666666667,0.62,0.136,0.68,0.08399999999999999,0.84,0.5195714285714285,0.5959424630497384,0.5261366864130023
|
| 79 |
+
0.6276901004304161,3500,0.34,0.62,0.68,0.84,0.34,0.34,0.20666666666666667,0.62,0.136,0.68,0.08399999999999999,0.84,0.5000238095238095,0.5817461333361641,0.506881638131638
|
| 80 |
+
0.6725251076040172,3750,0.38,0.54,0.68,0.82,0.38,0.38,0.18,0.54,0.136,0.68,0.08199999999999999,0.82,0.4990238095238096,0.574449934686088,0.5065894537036111
|
| 81 |
+
0.7173601147776184,4000,0.36,0.56,0.68,0.84,0.36,0.36,0.18666666666666668,0.56,0.136,0.68,0.08399999999999999,0.84,0.49754761904761907,0.5783298901538331,0.5039462424830071
|
| 82 |
+
0.7621951219512195,4250,0.38,0.54,0.68,0.8,0.38,0.38,0.18,0.54,0.136,0.68,0.08,0.8,0.4962698412698412,0.5679224634618015,0.5047910892580143
|
| 83 |
+
0.8070301291248206,4500,0.38,0.54,0.68,0.8,0.38,0.38,0.18,0.54,0.136,0.68,0.08,0.8,0.49724603174603177,0.5689079415466521,0.5057280863753186
|
| 84 |
+
0.8518651362984218,4750,0.38,0.54,0.68,0.8,0.38,0.38,0.18,0.54,0.136,0.68,0.08,0.8,0.49702380952380953,0.5686686381597302,0.5062534467729701
|
| 85 |
+
0.896700143472023,5000,0.38,0.54,0.68,0.8,0.38,0.38,0.18,0.54,0.136,0.68,0.08,0.8,0.49702380952380953,0.5686686381597302,0.5063338862610184
|
eval/Information-Retrieval_evaluation_NanoNQ_results.csv
CHANGED
|
@@ -62,3 +62,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accurac
|
|
| 62 |
0.8070301291248206,4500,0.34,0.54,0.6,0.66,0.34,0.31,0.18666666666666665,0.51,0.128,0.58,0.07,0.64,0.4526904761904762,0.4867145189304679,0.4396658900478081
|
| 63 |
0.8518651362984218,4750,0.32,0.54,0.62,0.66,0.32,0.3,0.18666666666666665,0.51,0.132,0.6,0.07,0.64,0.4471666666666667,0.48748319998633916,0.44239180468060907
|
| 64 |
0.896700143472023,5000,0.32,0.54,0.6,0.66,0.32,0.3,0.18666666666666665,0.51,0.128,0.58,0.07,0.64,0.4465,0.48687028758380874,0.44172587957864395
|
| 62 |
0.8070301291248206,4500,0.34,0.54,0.6,0.66,0.34,0.31,0.18666666666666665,0.51,0.128,0.58,0.07,0.64,0.4526904761904762,0.4867145189304679,0.4396658900478081
|
| 63 |
0.8518651362984218,4750,0.32,0.54,0.62,0.66,0.32,0.3,0.18666666666666665,0.51,0.132,0.6,0.07,0.64,0.4471666666666667,0.48748319998633916,0.44239180468060907
|
| 64 |
0.896700143472023,5000,0.32,0.54,0.6,0.66,0.32,0.3,0.18666666666666665,0.51,0.128,0.58,0.07,0.64,0.4465,0.48687028758380874,0.44172587957864395
|
| 65 |
+
0,0,0.52,0.7,0.78,0.84,0.52,0.49,0.2333333333333333,0.65,0.15600000000000003,0.73,0.08999999999999998,0.8,0.6295238095238095,0.655180139603386,0.6050874426120327
|
| 66 |
+
0.04483500717360115,250,0.42,0.58,0.64,0.7,0.42,0.4,0.2,0.56,0.136,0.62,0.07400000000000001,0.67,0.5126666666666666,0.5428573559072695,0.5042166330920687
|
| 67 |
+
0.0896700143472023,500,0.38,0.46,0.5,0.52,0.38,0.36,0.16,0.45,0.10800000000000001,0.5,0.05600000000000001,0.52,0.42533333333333334,0.4457898552989127,0.42706306291127477
|
| 68 |
+
0.13450502152080343,750,0.44,0.54,0.56,0.6,0.44,0.43,0.18666666666666665,0.53,0.11600000000000002,0.54,0.06400000000000002,0.59,0.49404761904761896,0.5159570908086976,0.4965777871381774
|
| 69 |
+
0.1793400286944046,1000,0.48,0.6,0.64,0.66,0.48,0.46,0.20666666666666664,0.56,0.136,0.61,0.07,0.63,0.5485238095238095,0.5594855215148441,0.5393816157072532
|
| 70 |
+
0.22417503586800575,1250,0.36,0.48,0.5,0.54,0.36,0.36,0.16,0.46,0.10800000000000001,0.5,0.05800000000000001,0.54,0.4263809523809523,0.454425603688531,0.4335228238127278
|
| 71 |
+
0.26901004304160686,1500,0.5,0.56,0.62,0.68,0.5,0.47,0.19333333333333333,0.53,0.136,0.6,0.07600000000000001,0.67,0.5526666666666666,0.5728784984088857,0.5441837282873221
|
| 72 |
+
0.31384505021520803,1750,0.48,0.6,0.62,0.68,0.48,0.44,0.2133333333333333,0.57,0.132,0.59,0.07400000000000001,0.66,0.546,0.5608375815980038,0.5290146476027398
|
| 73 |
+
0.3586800573888092,2000,0.44,0.6,0.62,0.66,0.44,0.41,0.2133333333333333,0.57,0.132,0.59,0.07200000000000001,0.64,0.5306666666666667,0.5473731717209221,0.5205315285350001
|
| 74 |
+
0.4035150645624103,2250,0.46,0.62,0.62,0.7,0.46,0.42,0.21999999999999997,0.59,0.132,0.59,0.076,0.68,0.5480555555555555,0.5664919007358439,0.5325330170606462
|
| 75 |
+
0.4483500717360115,2500,0.42,0.58,0.62,0.68,0.42,0.38,0.20666666666666664,0.55,0.132,0.59,0.07200000000000001,0.65,0.5171904761904762,0.5349102193796205,0.49767561740605765
|
| 76 |
+
0.4931850789096126,2750,0.42,0.58,0.62,0.66,0.42,0.38,0.20666666666666664,0.56,0.132,0.59,0.07,0.63,0.5128571428571429,0.5305417572323692,0.500674495141964
|
| 77 |
+
0.5380200860832137,3000,0.36,0.58,0.62,0.66,0.36,0.33,0.20666666666666664,0.55,0.132,0.59,0.07,0.63,0.47535714285714287,0.5056776748073686,0.46917147086883276
|
| 78 |
+
0.582855093256815,3250,0.4,0.56,0.6,0.66,0.4,0.36,0.2,0.54,0.128,0.57,0.07200000000000001,0.64,0.4951904761904762,0.5203342116934336,0.4831348147040378
|
| 79 |
+
0.6276901004304161,3500,0.4,0.58,0.62,0.66,0.4,0.36,0.20666666666666664,0.55,0.132,0.59,0.07,0.63,0.4978888888888888,0.5184828560998758,0.4866964965155002
|
| 80 |
+
0.6725251076040172,3750,0.38,0.56,0.6,0.66,0.38,0.35,0.2,0.53,0.12800000000000003,0.57,0.07,0.63,0.477047619047619,0.5051560366496759,0.46717407724278326
|
| 81 |
+
0.7173601147776184,4000,0.4,0.58,0.62,0.66,0.4,0.37,0.20666666666666664,0.55,0.132,0.59,0.07,0.63,0.4948333333333333,0.519008475868834,0.4862411991234443
|
| 82 |
+
0.7621951219512195,4250,0.38,0.54,0.6,0.64,0.38,0.35,0.19333333333333333,0.51,0.12800000000000003,0.57,0.068,0.61,0.47083333333333327,0.49587988697564983,0.4624349223748537
|
| 83 |
+
0.8070301291248206,4500,0.4,0.56,0.6,0.66,0.4,0.36,0.2,0.53,0.12800000000000003,0.57,0.07,0.63,0.4851904761904761,0.5067916971640994,0.4667389293530859
|
| 84 |
+
0.8518651362984218,4750,0.4,0.56,0.62,0.66,0.4,0.36,0.2,0.53,0.132,0.59,0.07,0.63,0.4935238095238095,0.5135182747262652,0.4760970118108672
|
| 85 |
+
0.896700143472023,5000,0.4,0.56,0.6,0.66,0.4,0.36,0.2,0.54,0.12800000000000003,0.58,0.07,0.63,0.48852380952380947,0.5105228253020769,0.4728184565167554
|
eval/NanoBEIR_evaluation_mean_results.csv
CHANGED
|
@@ -62,3 +62,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Accurac
|
|
| 62 |
0.8070301291248206,4500,0.31000000000000005,0.56,0.62,0.69,0.31000000000000005,0.29500000000000004,0.19,0.5449999999999999,0.128,0.61,0.07100000000000001,0.6799999999999999,0.44553968253968257,0.4969882004233377,0.4464265880407993
|
| 63 |
0.8518651362984218,4750,0.30000000000000004,0.56,0.63,0.69,0.30000000000000004,0.29000000000000004,0.19,0.5449999999999999,0.13,0.62,0.07100000000000001,0.6799999999999999,0.44125000000000003,0.49620759222677624,0.446162306954853
|
| 64 |
0.896700143472023,5000,0.30000000000000004,0.56,0.62,0.69,0.30000000000000004,0.29000000000000004,0.19,0.5449999999999999,0.128,0.61,0.07100000000000001,0.6799999999999999,0.4425555555555556,0.497185736443469,0.44753124213980516
|
| 62 |
0.8070301291248206,4500,0.31000000000000005,0.56,0.62,0.69,0.31000000000000005,0.29500000000000004,0.19,0.5449999999999999,0.128,0.61,0.07100000000000001,0.6799999999999999,0.44553968253968257,0.4969882004233377,0.4464265880407993
|
| 63 |
0.8518651362984218,4750,0.30000000000000004,0.56,0.63,0.69,0.30000000000000004,0.29000000000000004,0.19,0.5449999999999999,0.13,0.62,0.07100000000000001,0.6799999999999999,0.44125000000000003,0.49620759222677624,0.446162306954853
|
| 64 |
0.896700143472023,5000,0.30000000000000004,0.56,0.62,0.69,0.30000000000000004,0.29000000000000004,0.19,0.5449999999999999,0.128,0.61,0.07100000000000001,0.6799999999999999,0.4425555555555556,0.497185736443469,0.44753124213980516
|
| 65 |
+
0,0,0.48,0.71,0.77,0.85,0.48,0.46499999999999997,0.23666666666666664,0.685,0.15400000000000003,0.745,0.08799999999999998,0.8300000000000001,0.6083730158730158,0.6541026554291748,0.5996552312723218
|
| 66 |
+
0.04483500717360115,250,0.45999999999999996,0.62,0.69,0.78,0.45999999999999996,0.45,0.21000000000000002,0.6100000000000001,0.14200000000000002,0.6799999999999999,0.07999999999999999,0.765,0.5568055555555556,0.60217294640904,0.5549806427492429
|
| 67 |
+
0.0896700143472023,500,0.38,0.55,0.6,0.6799999999999999,0.38,0.37,0.18666666666666665,0.545,0.12400000000000001,0.6,0.07,0.6799999999999999,0.4778452380952381,0.525003326837396,0.4810339448048303
|
| 68 |
+
0.13450502152080343,750,0.39,0.5800000000000001,0.64,0.72,0.39,0.385,0.19666666666666666,0.575,0.13,0.63,0.07400000000000001,0.715,0.4969365079365079,0.5489216888990647,0.5010355040129608
|
| 69 |
+
0.1793400286944046,1000,0.42,0.62,0.69,0.75,0.42,0.41000000000000003,0.20999999999999996,0.6000000000000001,0.14200000000000002,0.675,0.077,0.735,0.5343015873015873,0.5785422167610583,0.5326890293099991
|
| 70 |
+
0.22417503586800575,1250,0.37,0.54,0.6,0.6799999999999999,0.37,0.37,0.18,0.53,0.12400000000000001,0.6,0.07,0.6799999999999999,0.47197619047619044,0.5222550808953752,0.479459928050002
|
| 71 |
+
0.26901004304160686,1500,0.44,0.5800000000000001,0.6799999999999999,0.78,0.44,0.425,0.19666666666666666,0.565,0.14200000000000002,0.6699999999999999,0.08199999999999999,0.775,0.5416785714285715,0.5933251615728632,0.5390400102180669
|
| 72 |
+
0.31384505021520803,1750,0.45999999999999996,0.62,0.6699999999999999,0.75,0.45999999999999996,0.44,0.21333333333333332,0.605,0.138,0.655,0.078,0.74,0.5521190476190476,0.5909344608551627,0.5475820941588783
|
| 73 |
+
0.3586800573888092,2000,0.43,0.61,0.6799999999999999,0.75,0.43,0.415,0.20999999999999996,0.595,0.14,0.665,0.078,0.74,0.536888888888889,0.5806509238183464,0.5351068707214656
|
| 74 |
+
0.4035150645624103,2250,0.44,0.63,0.6699999999999999,0.78,0.44,0.42,0.21666666666666665,0.615,0.138,0.655,0.08099999999999999,0.77,0.5512896825396825,0.5969811151695501,0.546210277480671
|
| 75 |
+
0.4483500717360115,2500,0.41000000000000003,0.61,0.6599999999999999,0.76,0.41000000000000003,0.39,0.20999999999999996,0.595,0.136,0.645,0.078,0.745,0.5249087301587302,0.5704106229966672,0.518364237829458
|
| 76 |
+
0.4931850789096126,2750,0.41000000000000003,0.59,0.6599999999999999,0.75,0.41000000000000003,0.39,0.2033333333333333,0.5800000000000001,0.136,0.645,0.077,0.735,0.5199087301587302,0.5658200254691879,0.5169925504972475
|
| 77 |
+
0.5380200860832137,3000,0.37,0.6,0.65,0.73,0.37,0.355,0.20666666666666667,0.585,0.134,0.635,0.07500000000000001,0.7150000000000001,0.4940634920634921,0.5436268923143779,0.49545752177524294
|
| 78 |
+
0.582855093256815,3250,0.39,0.5900000000000001,0.64,0.75,0.39,0.37,0.20333333333333334,0.5800000000000001,0.132,0.625,0.078,0.74,0.5073809523809523,0.558138337371586,0.5046357505585201
|
| 79 |
+
0.6276901004304161,3500,0.37,0.6,0.65,0.75,0.37,0.35,0.20666666666666667,0.585,0.134,0.635,0.077,0.735,0.4989563492063492,0.5501144947180199,0.4967890673235691
|
| 80 |
+
0.6725251076040172,3750,0.38,0.55,0.64,0.74,0.38,0.365,0.19,0.535,0.132,0.625,0.076,0.725,0.4880357142857143,0.5398029856678819,0.48688176547319717
|
| 81 |
+
0.7173601147776184,4000,0.38,0.5700000000000001,0.65,0.75,0.38,0.365,0.19666666666666666,0.555,0.134,0.635,0.077,0.735,0.4961904761904762,0.5486691830113335,0.4950937208032257
|
| 82 |
+
0.7621951219512195,4250,0.38,0.54,0.64,0.72,0.38,0.365,0.18666666666666665,0.525,0.132,0.625,0.07400000000000001,0.7050000000000001,0.4835515873015872,0.5319011752187257,0.48361300581643396
|
| 83 |
+
0.8070301291248206,4500,0.39,0.55,0.64,0.73,0.39,0.37,0.19,0.535,0.132,0.625,0.07500000000000001,0.7150000000000001,0.4912182539682539,0.5378498193553758,0.4862335078642023
|
| 84 |
+
0.8518651362984218,4750,0.39,0.55,0.65,0.73,0.39,0.37,0.19,0.535,0.134,0.635,0.07500000000000001,0.7150000000000001,0.4952738095238095,0.5410934564429977,0.4911752292919187
|
| 85 |
+
0.896700143472023,5000,0.39,0.55,0.64,0.73,0.39,0.37,0.19,0.54,0.132,0.63,0.07500000000000001,0.7150000000000001,0.4927738095238095,0.5395957317309036,0.48957617138888687
|
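These per-step CSVs are the standard output of sentence-transformers' retrieval evaluators. A hedged sketch of how a NanoBEIR evaluation like this is typically wired up (the model id and output path are placeholders, not read from this repository):

```python
# Sketch: assumed evaluator wiring (not the exact script used for this checkpoint);
# evaluators of this kind append one row per evaluation step to the CSVs above.
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import NanoBEIREvaluator

model = SentenceTransformer("path/or/repo-id")                   # placeholder
evaluator = NanoBEIREvaluator(dataset_names=["msmarco", "nq"])   # the two subsets under eval/
results = evaluator(model, output_path="eval")                   # writes *_results.csv files
```
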
final_metrics.json
CHANGED
|
@@ -1,231 +1,231 @@
|
|
| 1 |
{
|
| 2 |
"nano_beir": {
|
| 3 |
-
"NanoClimateFEVER_cosine_accuracy@1": 0.
|
| 4 |
-
"NanoClimateFEVER_cosine_accuracy@3": 0.
|
| 5 |
-
"NanoClimateFEVER_cosine_accuracy@5": 0.
|
| 6 |
-
"NanoClimateFEVER_cosine_accuracy@10": 0.
|
| 7 |
-
"NanoClimateFEVER_cosine_precision@1": 0.
|
| 8 |
-
"NanoClimateFEVER_cosine_precision@3": 0.
|
| 9 |
-
"NanoClimateFEVER_cosine_precision@5": 0.
|
| 10 |
-
"NanoClimateFEVER_cosine_precision@10": 0.
|
| 11 |
-
"NanoClimateFEVER_cosine_recall@1": 0.
|
| 12 |
-
"NanoClimateFEVER_cosine_recall@3": 0.
|
| 13 |
-
"NanoClimateFEVER_cosine_recall@5": 0.
|
| 14 |
-
"NanoClimateFEVER_cosine_recall@10": 0.
|
| 15 |
-
"NanoClimateFEVER_cosine_ndcg@10": 0.
|
| 16 |
-
"NanoClimateFEVER_cosine_mrr@10": 0.
|
| 17 |
-
"NanoClimateFEVER_cosine_map@100": 0.
|
| 18 |
-
"NanoDBPedia_cosine_accuracy@1": 0.
|
| 19 |
-
"NanoDBPedia_cosine_accuracy@3": 0.
|
| 20 |
-
"NanoDBPedia_cosine_accuracy@5": 0.
|
| 21 |
-
"NanoDBPedia_cosine_accuracy@10": 0.
|
| 22 |
-
"NanoDBPedia_cosine_precision@1": 0.
|
| 23 |
-
"NanoDBPedia_cosine_precision@3": 0.
|
| 24 |
-
"NanoDBPedia_cosine_precision@5": 0.
|
| 25 |
-
"NanoDBPedia_cosine_precision@10": 0.
|
| 26 |
-
"NanoDBPedia_cosine_recall@1": 0.
|
| 27 |
-
"NanoDBPedia_cosine_recall@3": 0.
|
| 28 |
-
"NanoDBPedia_cosine_recall@5": 0.
|
| 29 |
-
"NanoDBPedia_cosine_recall@10": 0.
|
| 30 |
-
"NanoDBPedia_cosine_ndcg@10": 0.
|
| 31 |
-
"NanoDBPedia_cosine_mrr@10": 0.
|
| 32 |
-
"NanoDBPedia_cosine_map@100": 0.
|
| 33 |
-
"NanoFEVER_cosine_accuracy@1": 0.
|
| 34 |
-
"NanoFEVER_cosine_accuracy@3": 0.
|
| 35 |
-
"NanoFEVER_cosine_accuracy@5": 0.
|
| 36 |
-
"NanoFEVER_cosine_accuracy@10": 0.
|
| 37 |
-
"NanoFEVER_cosine_precision@1": 0.
|
| 38 |
-
"NanoFEVER_cosine_precision@3": 0.
|
| 39 |
-
"NanoFEVER_cosine_precision@5": 0.
|
| 40 |
-
"NanoFEVER_cosine_precision@10": 0.
|
| 41 |
-
"NanoFEVER_cosine_recall@1": 0.
|
| 42 |
-
"NanoFEVER_cosine_recall@3": 0.
|
| 43 |
-
"NanoFEVER_cosine_recall@5": 0.
|
| 44 |
-
"NanoFEVER_cosine_recall@10": 0.
|
| 45 |
-
"NanoFEVER_cosine_ndcg@10": 0.
|
| 46 |
-
"NanoFEVER_cosine_mrr@10": 0.
|
| 47 |
-
"NanoFEVER_cosine_map@100": 0.
|
| 48 |
-
"NanoFiQA2018_cosine_accuracy@1": 0.
|
| 49 |
-
"NanoFiQA2018_cosine_accuracy@3": 0.
|
| 50 |
-
"NanoFiQA2018_cosine_accuracy@5": 0.
|
| 51 |
-
"NanoFiQA2018_cosine_accuracy@10": 0.
|
| 52 |
-
"NanoFiQA2018_cosine_precision@1": 0.
|
| 53 |
-
"NanoFiQA2018_cosine_precision@3": 0.
|
| 54 |
"NanoFiQA2018_cosine_precision@5": 0.124,
|
| 55 |
-
"NanoFiQA2018_cosine_precision@10": 0.
|
| 56 |
-
"NanoFiQA2018_cosine_recall@1": 0.
|
| 57 |
-
"NanoFiQA2018_cosine_recall@3": 0.
|
| 58 |
-
"NanoFiQA2018_cosine_recall@5": 0.
|
| 59 |
-
"NanoFiQA2018_cosine_recall@10": 0.
|
| 60 |
-
"NanoFiQA2018_cosine_ndcg@10": 0.
|
| 61 |
-
"NanoFiQA2018_cosine_mrr@10": 0.
|
| 62 |
-
"NanoFiQA2018_cosine_map@100": 0.
|
| 63 |
-
"NanoHotpotQA_cosine_accuracy@1": 0.
|
| 64 |
-
"NanoHotpotQA_cosine_accuracy@3": 0.
|
| 65 |
-
"NanoHotpotQA_cosine_accuracy@5": 0.
|
| 66 |
-
"NanoHotpotQA_cosine_accuracy@10": 0.
|
| 67 |
-
"NanoHotpotQA_cosine_precision@1": 0.
|
| 68 |
-
"NanoHotpotQA_cosine_precision@3": 0.
|
| 69 |
-
"NanoHotpotQA_cosine_precision@5": 0.
|
| 70 |
-
"NanoHotpotQA_cosine_precision@10": 0.
|
| 71 |
-
"NanoHotpotQA_cosine_recall@1": 0.
|
| 72 |
-
"NanoHotpotQA_cosine_recall@3": 0.
|
| 73 |
-
"NanoHotpotQA_cosine_recall@5": 0.
|
| 74 |
-
"NanoHotpotQA_cosine_recall@10": 0.
|
| 75 |
-
"NanoHotpotQA_cosine_ndcg@10": 0.
|
| 76 |
-
"NanoHotpotQA_cosine_mrr@10": 0.
|
| 77 |
-
"NanoHotpotQA_cosine_map@100": 0.
|
| 78 |
"NanoMSMARCO_cosine_accuracy@1": 0.28,
|
| 79 |
-
"NanoMSMARCO_cosine_accuracy@3": 0.
|
| 80 |
-
"NanoMSMARCO_cosine_accuracy@5": 0.
|
| 81 |
-
"NanoMSMARCO_cosine_accuracy@10": 0.
|
| 82 |
"NanoMSMARCO_cosine_precision@1": 0.28,
|
| 83 |
-
"NanoMSMARCO_cosine_precision@3": 0.
|
| 84 |
-
"NanoMSMARCO_cosine_precision@5": 0.
|
| 85 |
-
"NanoMSMARCO_cosine_precision@10": 0.
|
| 86 |
"NanoMSMARCO_cosine_recall@1": 0.28,
|
| 87 |
-
"NanoMSMARCO_cosine_recall@3": 0.
|
| 88 |
-
"NanoMSMARCO_cosine_recall@5": 0.
|
| 89 |
-
"NanoMSMARCO_cosine_recall@10": 0.
|
| 90 |
-
"NanoMSMARCO_cosine_ndcg@10": 0.
|
| 91 |
-
"NanoMSMARCO_cosine_mrr@10": 0.
|
| 92 |
-
"NanoMSMARCO_cosine_map@100": 0.
|
| 93 |
-
"NanoNFCorpus_cosine_accuracy@1": 0.
|
| 94 |
-
"NanoNFCorpus_cosine_accuracy@3": 0.
|
| 95 |
-
"NanoNFCorpus_cosine_accuracy@5": 0.
|
| 96 |
-
"NanoNFCorpus_cosine_accuracy@10": 0.
|
| 97 |
-
"NanoNFCorpus_cosine_precision@1": 0.
|
| 98 |
-
"NanoNFCorpus_cosine_precision@3": 0.
|
| 99 |
-
"NanoNFCorpus_cosine_precision@5": 0.
|
| 100 |
-
"NanoNFCorpus_cosine_precision@10": 0.
|
| 101 |
-
"NanoNFCorpus_cosine_recall@1": 0.
|
| 102 |
-
"NanoNFCorpus_cosine_recall@3": 0.
|
| 103 |
-
"NanoNFCorpus_cosine_recall@5": 0.
|
| 104 |
-
"NanoNFCorpus_cosine_recall@10": 0.
|
| 105 |
-
"NanoNFCorpus_cosine_ndcg@10": 0.
|
| 106 |
-
"NanoNFCorpus_cosine_mrr@10": 0.
|
| 107 |
-
"NanoNFCorpus_cosine_map@100": 0.
|
| 108 |
-
"NanoNQ_cosine_accuracy@1": 0.
|
| 109 |
-
"NanoNQ_cosine_accuracy@3": 0.
|
| 110 |
"NanoNQ_cosine_accuracy@5": 0.6,
|
| 111 |
-
"NanoNQ_cosine_accuracy@10": 0.
|
| 112 |
-
"NanoNQ_cosine_precision@1": 0.
|
| 113 |
-
"NanoNQ_cosine_precision@3": 0.
|
| 114 |
"NanoNQ_cosine_precision@5": 0.128,
|
| 115 |
-
"NanoNQ_cosine_precision@10": 0.
|
| 116 |
-
"NanoNQ_cosine_recall@1": 0.
|
| 117 |
-
"NanoNQ_cosine_recall@3": 0.
|
| 118 |
"NanoNQ_cosine_recall@5": 0.58,
|
| 119 |
-
"NanoNQ_cosine_recall@10": 0.
|
| 120 |
-
"NanoNQ_cosine_ndcg@10": 0.
|
| 121 |
-
"NanoNQ_cosine_mrr@10": 0.
|
| 122 |
-
"NanoNQ_cosine_map@100": 0.
|
| 123 |
-
"NanoQuoraRetrieval_cosine_accuracy@1": 0.
|
| 124 |
"NanoQuoraRetrieval_cosine_accuracy@3": 1.0,
|
| 125 |
"NanoQuoraRetrieval_cosine_accuracy@5": 1.0,
|
| 126 |
"NanoQuoraRetrieval_cosine_accuracy@10": 1.0,
|
| 127 |
-
"NanoQuoraRetrieval_cosine_precision@1": 0.
|
| 128 |
-
"NanoQuoraRetrieval_cosine_precision@3": 0.
|
| 129 |
-
"NanoQuoraRetrieval_cosine_precision@5": 0.
|
| 130 |
-
"NanoQuoraRetrieval_cosine_precision@10": 0.
|
| 131 |
-
"NanoQuoraRetrieval_cosine_recall@1": 0.
|
| 132 |
-
"NanoQuoraRetrieval_cosine_recall@3": 0.
|
| 133 |
-
"NanoQuoraRetrieval_cosine_recall@5": 0.
|
| 134 |
-
"NanoQuoraRetrieval_cosine_recall@10": 0.
|
| 135 |
-
"NanoQuoraRetrieval_cosine_ndcg@10": 0.
|
| 136 |
-
"NanoQuoraRetrieval_cosine_mrr@10": 0.
|
| 137 |
-
"NanoQuoraRetrieval_cosine_map@100": 0.
|
| 138 |
-
"NanoSCIDOCS_cosine_accuracy@1": 0.
|
| 139 |
-
"NanoSCIDOCS_cosine_accuracy@3": 0.
|
| 140 |
-
"NanoSCIDOCS_cosine_accuracy@5": 0.
|
| 141 |
-
"NanoSCIDOCS_cosine_accuracy@10": 0.
|
| 142 |
-
"NanoSCIDOCS_cosine_precision@1": 0.
|
| 143 |
-
"NanoSCIDOCS_cosine_precision@3": 0.
|
| 144 |
-
"NanoSCIDOCS_cosine_precision@5": 0.
|
| 145 |
-
"NanoSCIDOCS_cosine_precision@10": 0.
|
| 146 |
-
"NanoSCIDOCS_cosine_recall@1": 0.
|
| 147 |
-
"NanoSCIDOCS_cosine_recall@3": 0.
|
| 148 |
-
"NanoSCIDOCS_cosine_recall@5": 0.
|
| 149 |
-
"NanoSCIDOCS_cosine_recall@10": 0.
|
| 150 |
-
"NanoSCIDOCS_cosine_ndcg@10": 0.
|
| 151 |
-
"NanoSCIDOCS_cosine_mrr@10": 0.
|
| 152 |
-
"NanoSCIDOCS_cosine_map@100": 0.
|
| 153 |
-
"NanoArguAna_cosine_accuracy@1": 0.
|
| 154 |
-
"NanoArguAna_cosine_accuracy@3": 0.
|
| 155 |
-
"NanoArguAna_cosine_accuracy@5": 0.
|
| 156 |
-
"NanoArguAna_cosine_accuracy@10": 0.
|
| 157 |
-
"NanoArguAna_cosine_precision@1": 0.
|
| 158 |
-
"NanoArguAna_cosine_precision@3": 0.
|
| 159 |
-
"NanoArguAna_cosine_precision@5": 0.
|
| 160 |
-
"NanoArguAna_cosine_precision@10": 0.
|
| 161 |
-
"NanoArguAna_cosine_recall@1": 0.
|
| 162 |
-
"NanoArguAna_cosine_recall@3": 0.
|
| 163 |
-
"NanoArguAna_cosine_recall@5": 0.
|
| 164 |
-
"NanoArguAna_cosine_recall@10": 0.
|
| 165 |
-
"NanoArguAna_cosine_ndcg@10": 0.
|
| 166 |
-
"NanoArguAna_cosine_mrr@10": 0.
|
| 167 |
-
"NanoArguAna_cosine_map@100": 0.
|
| 168 |
-
"NanoSciFact_cosine_accuracy@1": 0.
|
| 169 |
-
"NanoSciFact_cosine_accuracy@3": 0.
|
| 170 |
-
"NanoSciFact_cosine_accuracy@5": 0.
|
| 171 |
-
"NanoSciFact_cosine_accuracy@10": 0.
|
| 172 |
-
"NanoSciFact_cosine_precision@1": 0.
|
| 173 |
-
"NanoSciFact_cosine_precision@3": 0.
|
| 174 |
-
"NanoSciFact_cosine_precision@5": 0.
|
| 175 |
-
"NanoSciFact_cosine_precision@10": 0.
|
| 176 |
-
"NanoSciFact_cosine_recall@1": 0.
|
| 177 |
-
"NanoSciFact_cosine_recall@3": 0.
|
| 178 |
-
"NanoSciFact_cosine_recall@5": 0.
|
| 179 |
-
"NanoSciFact_cosine_recall@10": 0.
|
| 180 |
-
"NanoSciFact_cosine_ndcg@10": 0.
|
| 181 |
-
"NanoSciFact_cosine_mrr@10": 0.
|
| 182 |
-
"NanoSciFact_cosine_map@100": 0.
|
| 183 |
-
"NanoTouche2020_cosine_accuracy@1": 0.
|
| 184 |
-
"NanoTouche2020_cosine_accuracy@3": 0.
|
| 185 |
-
"NanoTouche2020_cosine_accuracy@5": 0.
|
| 186 |
-
"NanoTouche2020_cosine_accuracy@10": 0.
|
| 187 |
-
"NanoTouche2020_cosine_precision@1": 0.
|
| 188 |
-
"NanoTouche2020_cosine_precision@3": 0.
|
| 189 |
-
"NanoTouche2020_cosine_precision@5": 0.
|
| 190 |
-
"NanoTouche2020_cosine_precision@10": 0.
|
| 191 |
-
"NanoTouche2020_cosine_recall@1": 0.
|
| 192 |
-
"NanoTouche2020_cosine_recall@3": 0.
|
| 193 |
-
"NanoTouche2020_cosine_recall@5": 0.
|
| 194 |
-
"NanoTouche2020_cosine_recall@10": 0.
|
| 195 |
-
"NanoTouche2020_cosine_ndcg@10": 0.
|
| 196 |
-
"NanoTouche2020_cosine_mrr@10": 0.
|
| 197 |
-
"NanoTouche2020_cosine_map@100": 0.
|
| 198 |
-
"NanoBEIR_mean_cosine_accuracy@1": 0.
|
| 199 |
-
"NanoBEIR_mean_cosine_accuracy@3": 0.
|
| 200 |
-
"NanoBEIR_mean_cosine_accuracy@5": 0.
|
| 201 |
-
"NanoBEIR_mean_cosine_accuracy@10": 0.
|
| 202 |
-
"NanoBEIR_mean_cosine_precision@1": 0.
|
| 203 |
-
"NanoBEIR_mean_cosine_precision@3": 0.
|
| 204 |
-
"NanoBEIR_mean_cosine_precision@5": 0.
|
| 205 |
-
"NanoBEIR_mean_cosine_precision@10": 0.
|
| 206 |
-
"NanoBEIR_mean_cosine_recall@1": 0.
|
| 207 |
-
"NanoBEIR_mean_cosine_recall@3": 0.
|
| 208 |
-
"NanoBEIR_mean_cosine_recall@5": 0.
|
| 209 |
-
"NanoBEIR_mean_cosine_recall@10": 0.
|
| 210 |
-
"NanoBEIR_mean_cosine_ndcg@10": 0.
|
| 211 |
-
"NanoBEIR_mean_cosine_mrr@10": 0.
|
| 212 |
-
"NanoBEIR_mean_cosine_map@100": 0.
|
| 213 |
},
|
| 214 |
"beir_touche2020": {
|
| 215 |
-
"BeIR-touche2020-subset-test_cosine_accuracy@1": 0.
|
| 216 |
-
"BeIR-touche2020-subset-test_cosine_accuracy@3": 0.
|
| 217 |
"BeIR-touche2020-subset-test_cosine_accuracy@5": 0.9591836734693877,
|
| 218 |
"BeIR-touche2020-subset-test_cosine_accuracy@10": 0.9795918367346939,
|
| 219 |
-
"BeIR-touche2020-subset-test_cosine_precision@1": 0.
|
| 220 |
-
"BeIR-touche2020-subset-test_cosine_precision@3": 0.
|
| 221 |
-
"BeIR-touche2020-subset-test_cosine_precision@5": 0.
|
| 222 |
-
"BeIR-touche2020-subset-test_cosine_precision@10": 0.
|
| 223 |
-
"BeIR-touche2020-subset-test_cosine_recall@1": 0.
|
| 224 |
-
"BeIR-touche2020-subset-test_cosine_recall@3": 0.
|
| 225 |
-
"BeIR-touche2020-subset-test_cosine_recall@5": 0.
|
| 226 |
-
"BeIR-touche2020-subset-test_cosine_recall@10": 0.
|
| 227 |
-
"BeIR-touche2020-subset-test_cosine_ndcg@10": 0.
|
| 228 |
-
"BeIR-touche2020-subset-test_cosine_mrr@10": 0.
|
| 229 |
-
"BeIR-touche2020-subset-test_cosine_map@100": 0.
|
| 230 |
}
|
| 231 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"nano_beir": {
|
| 3 |
+
"NanoClimateFEVER_cosine_accuracy@1": 0.22,
|
| 4 |
+
"NanoClimateFEVER_cosine_accuracy@3": 0.36,
|
| 5 |
+
"NanoClimateFEVER_cosine_accuracy@5": 0.44,
|
| 6 |
+
"NanoClimateFEVER_cosine_accuracy@10": 0.68,
|
| 7 |
+
"NanoClimateFEVER_cosine_precision@1": 0.22,
|
| 8 |
+
"NanoClimateFEVER_cosine_precision@3": 0.12,
|
| 9 |
+
"NanoClimateFEVER_cosine_precision@5": 0.09200000000000001,
|
| 10 |
+
"NanoClimateFEVER_cosine_precision@10": 0.08,
|
| 11 |
+
"NanoClimateFEVER_cosine_recall@1": 0.10166666666666666,
|
| 12 |
+
"NanoClimateFEVER_cosine_recall@3": 0.1433333333333333,
|
| 13 |
+
"NanoClimateFEVER_cosine_recall@5": 0.19666666666666666,
|
| 14 |
+
"NanoClimateFEVER_cosine_recall@10": 0.32233333333333336,
|
| 15 |
+
"NanoClimateFEVER_cosine_ndcg@10": 0.24154521021050848,
|
| 16 |
+
"NanoClimateFEVER_cosine_mrr@10": 0.3309126984126983,
|
| 17 |
+
"NanoClimateFEVER_cosine_map@100": 0.17419838412151278,
|
| 18 |
+
"NanoDBPedia_cosine_accuracy@1": 0.62,
|
| 19 |
+
"NanoDBPedia_cosine_accuracy@3": 0.78,
|
| 20 |
+
"NanoDBPedia_cosine_accuracy@5": 0.84,
|
| 21 |
+
"NanoDBPedia_cosine_accuracy@10": 0.92,
|
| 22 |
+
"NanoDBPedia_cosine_precision@1": 0.62,
|
| 23 |
+
"NanoDBPedia_cosine_precision@3": 0.5199999999999999,
|
| 24 |
+
"NanoDBPedia_cosine_precision@5": 0.452,
|
| 25 |
+
"NanoDBPedia_cosine_precision@10": 0.364,
|
| 26 |
+
"NanoDBPedia_cosine_recall@1": 0.07067219113244924,
|
| 27 |
+
"NanoDBPedia_cosine_recall@3": 0.1473605766367288,
|
| 28 |
+
"NanoDBPedia_cosine_recall@5": 0.18535916558236945,
|
| 29 |
+
"NanoDBPedia_cosine_recall@10": 0.27484027728017424,
|
| 30 |
+
"NanoDBPedia_cosine_ndcg@10": 0.4817256707832707,
|
| 31 |
+
"NanoDBPedia_cosine_mrr@10": 0.7183571428571429,
|
| 32 |
+
"NanoDBPedia_cosine_map@100": 0.3555155246867996,
|
| 33 |
+
"NanoFEVER_cosine_accuracy@1": 0.76,
|
| 34 |
+
"NanoFEVER_cosine_accuracy@3": 0.84,
|
| 35 |
+
"NanoFEVER_cosine_accuracy@5": 0.9,
|
| 36 |
+
"NanoFEVER_cosine_accuracy@10": 0.9,
|
| 37 |
+
"NanoFEVER_cosine_precision@1": 0.76,
|
| 38 |
+
"NanoFEVER_cosine_precision@3": 0.29333333333333333,
|
| 39 |
+
"NanoFEVER_cosine_precision@5": 0.18799999999999997,
|
| 40 |
+
"NanoFEVER_cosine_precision@10": 0.09399999999999999,
|
| 41 |
+
"NanoFEVER_cosine_recall@1": 0.7066666666666666,
|
| 42 |
+
"NanoFEVER_cosine_recall@3": 0.7933333333333333,
|
| 43 |
+
"NanoFEVER_cosine_recall@5": 0.8533333333333333,
|
| 44 |
+
"NanoFEVER_cosine_recall@10": 0.8533333333333333,
|
| 45 |
+
"NanoFEVER_cosine_ndcg@10": 0.7928392587586685,
|
| 46 |
+
"NanoFEVER_cosine_mrr@10": 0.805,
|
| 47 |
+
"NanoFEVER_cosine_map@100": 0.7629176534259828,
|
| 48 |
+
"NanoFiQA2018_cosine_accuracy@1": 0.3,
|
| 49 |
+
"NanoFiQA2018_cosine_accuracy@3": 0.4,
|
| 50 |
+
"NanoFiQA2018_cosine_accuracy@5": 0.42,
|
| 51 |
+
"NanoFiQA2018_cosine_accuracy@10": 0.52,
|
| 52 |
+
"NanoFiQA2018_cosine_precision@1": 0.3,
|
| 53 |
+
"NanoFiQA2018_cosine_precision@3": 0.1733333333333333,
|
| 54 |
"NanoFiQA2018_cosine_precision@5": 0.124,
|
| 55 |
+
"NanoFiQA2018_cosine_precision@10": 0.08,
|
| 56 |
+
"NanoFiQA2018_cosine_recall@1": 0.14474603174603173,
|
| 57 |
+
"NanoFiQA2018_cosine_recall@3": 0.2201031746031746,
|
| 58 |
+
"NanoFiQA2018_cosine_recall@5": 0.2519365079365079,
|
| 59 |
+
"NanoFiQA2018_cosine_recall@10": 0.31682539682539684,
|
| 60 |
+
"NanoFiQA2018_cosine_ndcg@10": 0.2761214091575939,
|
| 61 |
+
"NanoFiQA2018_cosine_mrr@10": 0.35405555555555557,
|
| 62 |
+
"NanoFiQA2018_cosine_map@100": 0.23650883511668197,
|
| 63 |
+
"NanoHotpotQA_cosine_accuracy@1": 0.7,
|
| 64 |
+
"NanoHotpotQA_cosine_accuracy@3": 0.76,
|
| 65 |
+
"NanoHotpotQA_cosine_accuracy@5": 0.78,
|
| 66 |
+
"NanoHotpotQA_cosine_accuracy@10": 0.8,
|
| 67 |
+
"NanoHotpotQA_cosine_precision@1": 0.7,
|
| 68 |
+
"NanoHotpotQA_cosine_precision@3": 0.3733333333333333,
|
| 69 |
+
"NanoHotpotQA_cosine_precision@5": 0.244,
|
| 70 |
+
"NanoHotpotQA_cosine_precision@10": 0.13399999999999998,
|
| 71 |
+
"NanoHotpotQA_cosine_recall@1": 0.35,
|
| 72 |
+
"NanoHotpotQA_cosine_recall@3": 0.56,
|
| 73 |
+
"NanoHotpotQA_cosine_recall@5": 0.61,
|
| 74 |
+
"NanoHotpotQA_cosine_recall@10": 0.67,
|
| 75 |
+
"NanoHotpotQA_cosine_ndcg@10": 0.6362341710243232,
|
| 76 |
+
"NanoHotpotQA_cosine_mrr@10": 0.7373333333333334,
|
| 77 |
+
"NanoHotpotQA_cosine_map@100": 0.579012255659608,
|
| 78 |
"NanoMSMARCO_cosine_accuracy@1": 0.28,
|
| 79 |
+
"NanoMSMARCO_cosine_accuracy@3": 0.58,
|
| 80 |
+
"NanoMSMARCO_cosine_accuracy@5": 0.64,
|
| 81 |
+
"NanoMSMARCO_cosine_accuracy@10": 0.72,
|
| 82 |
"NanoMSMARCO_cosine_precision@1": 0.28,
|
| 83 |
+
"NanoMSMARCO_cosine_precision@3": 0.19333333333333333,
|
| 84 |
+
"NanoMSMARCO_cosine_precision@5": 0.128,
|
| 85 |
+
"NanoMSMARCO_cosine_precision@10": 0.07200000000000001,
|
| 86 |
"NanoMSMARCO_cosine_recall@1": 0.28,
|
| 87 |
+
"NanoMSMARCO_cosine_recall@3": 0.58,
|
| 88 |
+
"NanoMSMARCO_cosine_recall@5": 0.64,
|
| 89 |
+
"NanoMSMARCO_cosine_recall@10": 0.72,
|
| 90 |
+
"NanoMSMARCO_cosine_ndcg@10": 0.5075011853031293,
|
| 91 |
+
"NanoMSMARCO_cosine_mrr@10": 0.4386111111111111,
|
| 92 |
+
"NanoMSMARCO_cosine_map@100": 0.4533366047009664,
|
| 93 |
+
"NanoNFCorpus_cosine_accuracy@1": 0.38,
|
| 94 |
+
"NanoNFCorpus_cosine_accuracy@3": 0.46,
|
| 95 |
+
"NanoNFCorpus_cosine_accuracy@5": 0.54,
|
| 96 |
+
"NanoNFCorpus_cosine_accuracy@10": 0.64,
|
| 97 |
+
"NanoNFCorpus_cosine_precision@1": 0.38,
|
| 98 |
+
"NanoNFCorpus_cosine_precision@3": 0.3,
|
| 99 |
+
"NanoNFCorpus_cosine_precision@5": 0.3,
|
| 100 |
+
"NanoNFCorpus_cosine_precision@10": 0.276,
|
| 101 |
+
"NanoNFCorpus_cosine_recall@1": 0.012479157217241355,
|
| 102 |
+
"NanoNFCorpus_cosine_recall@3": 0.04881894595681059,
|
| 103 |
+
"NanoNFCorpus_cosine_recall@5": 0.06922116223257517,
|
| 104 |
+
"NanoNFCorpus_cosine_recall@10": 0.10938910626227699,
|
| 105 |
+
"NanoNFCorpus_cosine_ndcg@10": 0.30283246736353403,
|
| 106 |
+
"NanoNFCorpus_cosine_mrr@10": 0.45196825396825396,
|
| 107 |
+
"NanoNFCorpus_cosine_map@100": 0.12231981928859673,
|
| 108 |
+
"NanoNQ_cosine_accuracy@1": 0.32,
|
| 109 |
+
"NanoNQ_cosine_accuracy@3": 0.54,
|
| 110 |
"NanoNQ_cosine_accuracy@5": 0.6,
|
| 111 |
+
"NanoNQ_cosine_accuracy@10": 0.66,
|
| 112 |
+
"NanoNQ_cosine_precision@1": 0.32,
|
| 113 |
+
"NanoNQ_cosine_precision@3": 0.18666666666666665,
|
| 114 |
"NanoNQ_cosine_precision@5": 0.128,
|
| 115 |
+
"NanoNQ_cosine_precision@10": 0.07,
|
| 116 |
+
"NanoNQ_cosine_recall@1": 0.3,
|
| 117 |
+
"NanoNQ_cosine_recall@3": 0.51,
|
| 118 |
"NanoNQ_cosine_recall@5": 0.58,
|
| 119 |
+
"NanoNQ_cosine_recall@10": 0.64,
|
| 120 |
+
"NanoNQ_cosine_ndcg@10": 0.48687028758380874,
|
| 121 |
+
"NanoNQ_cosine_mrr@10": 0.4465,
|
| 122 |
+
"NanoNQ_cosine_map@100": 0.4417143853257704,
|
| 123 |
+
"NanoQuoraRetrieval_cosine_accuracy@1": 0.88,
|
| 124 |
"NanoQuoraRetrieval_cosine_accuracy@3": 1.0,
|
| 125 |
"NanoQuoraRetrieval_cosine_accuracy@5": 1.0,
|
| 126 |
"NanoQuoraRetrieval_cosine_accuracy@10": 1.0,
|
| 127 |
+
"NanoQuoraRetrieval_cosine_precision@1": 0.88,
|
| 128 |
+
"NanoQuoraRetrieval_cosine_precision@3": 0.41999999999999993,
|
| 129 |
+
"NanoQuoraRetrieval_cosine_precision@5": 0.26799999999999996,
|
| 130 |
+
"NanoQuoraRetrieval_cosine_precision@10": 0.13599999999999998,
|
| 131 |
+
"NanoQuoraRetrieval_cosine_recall@1": 0.7773333333333332,
|
| 132 |
+
"NanoQuoraRetrieval_cosine_recall@3": 0.972,
|
| 133 |
+
"NanoQuoraRetrieval_cosine_recall@5": 0.9893333333333334,
|
| 134 |
+
"NanoQuoraRetrieval_cosine_recall@10": 0.9926666666666667,
|
| 135 |
+
"NanoQuoraRetrieval_cosine_ndcg@10": 0.9483612484877714,
|
| 136 |
+
"NanoQuoraRetrieval_cosine_mrr@10": 0.9366666666666665,
|
| 137 |
+
"NanoQuoraRetrieval_cosine_map@100": 0.9296388888888888,
|
| 138 |
+
"NanoSCIDOCS_cosine_accuracy@1": 0.46,
|
| 139 |
+
"NanoSCIDOCS_cosine_accuracy@3": 0.68,
|
| 140 |
+
"NanoSCIDOCS_cosine_accuracy@5": 0.84,
|
| 141 |
+
"NanoSCIDOCS_cosine_accuracy@10": 0.9,
|
| 142 |
+
"NanoSCIDOCS_cosine_precision@1": 0.46,
|
| 143 |
+
"NanoSCIDOCS_cosine_precision@3": 0.3533333333333333,
|
| 144 |
+
"NanoSCIDOCS_cosine_precision@5": 0.304,
|
| 145 |
+
"NanoSCIDOCS_cosine_precision@10": 0.18999999999999997,
|
| 146 |
+
"NanoSCIDOCS_cosine_recall@1": 0.09666666666666666,
|
| 147 |
+
"NanoSCIDOCS_cosine_recall@3": 0.21766666666666665,
|
| 148 |
+
"NanoSCIDOCS_cosine_recall@5": 0.31266666666666665,
|
| 149 |
+
"NanoSCIDOCS_cosine_recall@10": 0.3896666666666666,
|
| 150 |
+
"NanoSCIDOCS_cosine_ndcg@10": 0.3893008993021786,
|
| 151 |
+
"NanoSCIDOCS_cosine_mrr@10": 0.6114444444444443,
|
| 152 |
+
"NanoSCIDOCS_cosine_map@100": 0.3091964898288773,
|
| 153 |
+
"NanoArguAna_cosine_accuracy@1": 0.22,
|
| 154 |
+
"NanoArguAna_cosine_accuracy@3": 0.54,
|
| 155 |
+
"NanoArguAna_cosine_accuracy@5": 0.58,
|
| 156 |
+
"NanoArguAna_cosine_accuracy@10": 0.74,
|
| 157 |
+
"NanoArguAna_cosine_precision@1": 0.22,
|
| 158 |
+
"NanoArguAna_cosine_precision@3": 0.18,
|
| 159 |
+
"NanoArguAna_cosine_precision@5": 0.11600000000000002,
|
| 160 |
+
"NanoArguAna_cosine_precision@10": 0.07400000000000001,
|
| 161 |
+
"NanoArguAna_cosine_recall@1": 0.22,
|
| 162 |
+
"NanoArguAna_cosine_recall@3": 0.54,
|
| 163 |
+
"NanoArguAna_cosine_recall@5": 0.58,
|
| 164 |
+
"NanoArguAna_cosine_recall@10": 0.74,
|
| 165 |
+
"NanoArguAna_cosine_ndcg@10": 0.4674888162177975,
|
| 166 |
+
"NanoArguAna_cosine_mrr@10": 0.3815793650793651,
|
| 167 |
+
"NanoArguAna_cosine_map@100": 0.3917367299367299,
|
| 168 |
+
"NanoSciFact_cosine_accuracy@1": 0.58,
|
| 169 |
+
"NanoSciFact_cosine_accuracy@3": 0.68,
|
| 170 |
+
"NanoSciFact_cosine_accuracy@5": 0.7,
|
| 171 |
+
"NanoSciFact_cosine_accuracy@10": 0.72,
|
| 172 |
+
"NanoSciFact_cosine_precision@1": 0.58,
|
| 173 |
+
"NanoSciFact_cosine_precision@3": 0.2533333333333333,
|
| 174 |
+
"NanoSciFact_cosine_precision@5": 0.156,
|
| 175 |
+
"NanoSciFact_cosine_precision@10": 0.08199999999999999,
|
| 176 |
+
"NanoSciFact_cosine_recall@1": 0.545,
|
| 177 |
+
"NanoSciFact_cosine_recall@3": 0.665,
|
| 178 |
+
"NanoSciFact_cosine_recall@5": 0.685,
|
| 179 |
+
"NanoSciFact_cosine_recall@10": 0.71,
|
| 180 |
+
"NanoSciFact_cosine_ndcg@10": 0.6523742480687815,
|
| 181 |
+
"NanoSciFact_cosine_mrr@10": 0.6375,
|
| 182 |
+
"NanoSciFact_cosine_map@100": 0.6370166989443306,
|
| 183 |
+
"NanoTouche2020_cosine_accuracy@1": 0.4489795918367347,
|
| 184 |
+
"NanoTouche2020_cosine_accuracy@3": 0.7755102040816326,
|
| 185 |
+
"NanoTouche2020_cosine_accuracy@5": 0.8979591836734694,
|
| 186 |
+
"NanoTouche2020_cosine_accuracy@10": 0.9795918367346939,
|
| 187 |
+
"NanoTouche2020_cosine_precision@1": 0.4489795918367347,
|
| 188 |
+
"NanoTouche2020_cosine_precision@3": 0.41496598639455773,
|
| 189 |
+
"NanoTouche2020_cosine_precision@5": 0.4326530612244897,
|
| 190 |
+
"NanoTouche2020_cosine_precision@10": 0.3591836734693878,
|
| 191 |
+
"NanoTouche2020_cosine_recall@1": 0.03398816288797225,
|
| 192 |
+
"NanoTouche2020_cosine_recall@3": 0.09117793391499442,
|
| 193 |
+
"NanoTouche2020_cosine_recall@5": 0.15321422858378142,
|
| 194 |
+
"NanoTouche2020_cosine_recall@10": 0.24000922572748823,
|
| 195 |
+
"NanoTouche2020_cosine_ndcg@10": 0.3959230964031327,
|
| 196 |
+
"NanoTouche2020_cosine_mrr@10": 0.6308309037900873,
|
| 197 |
+
"NanoTouche2020_cosine_map@100": 0.3023791135389433,
|
| 198 |
+
"NanoBEIR_mean_cosine_accuracy@1": 0.47453689167974883,
|
| 199 |
+
"NanoBEIR_mean_cosine_accuracy@3": 0.6458084772370486,
|
| 200 |
+
"NanoBEIR_mean_cosine_accuracy@5": 0.7059968602825746,
|
| 201 |
+
"NanoBEIR_mean_cosine_accuracy@10": 0.7830455259026687,
|
| 202 |
+
"NanoBEIR_mean_cosine_precision@1": 0.47453689167974883,
|
| 203 |
+
"NanoBEIR_mean_cosine_precision@3": 0.290894819466248,
|
| 204 |
+
"NanoBEIR_mean_cosine_precision@5": 0.22558869701726847,
|
| 205 |
+
"NanoBEIR_mean_cosine_precision@10": 0.15470643642072213,
|
| 206 |
+
"NanoBEIR_mean_cosine_recall@1": 0.2799399135628483,
|
| 207 |
+
"NanoBEIR_mean_cosine_recall@3": 0.42221492034192626,
|
| 208 |
+
"NanoBEIR_mean_cosine_recall@5": 0.4697485434104026,
|
| 209 |
+
"NanoBEIR_mean_cosine_recall@10": 0.536851077391949,
|
| 210 |
+
"NanoBEIR_mean_cosine_ndcg@10": 0.5060859975895767,
|
| 211 |
+
"NanoBEIR_mean_cosine_mrr@10": 0.5754430365552815,
|
| 212 |
+
"NanoBEIR_mean_cosine_map@100": 0.4381147218048991
|
| 213 |
},
|
| 214 |
"beir_touche2020": {
|
| 215 |
+
"BeIR-touche2020-subset-test_cosine_accuracy@1": 0.6530612244897959,
|
| 216 |
+
"BeIR-touche2020-subset-test_cosine_accuracy@3": 0.9591836734693877,
|
| 217 |
"BeIR-touche2020-subset-test_cosine_accuracy@5": 0.9591836734693877,
|
| 218 |
"BeIR-touche2020-subset-test_cosine_accuracy@10": 0.9795918367346939,
|
| 219 |
+
"BeIR-touche2020-subset-test_cosine_precision@1": 0.6530612244897959,
|
| 220 |
+
"BeIR-touche2020-subset-test_cosine_precision@3": 0.727891156462585,
|
| 221 |
+
"BeIR-touche2020-subset-test_cosine_precision@5": 0.6448979591836735,
|
| 222 |
+
"BeIR-touche2020-subset-test_cosine_precision@10": 0.5795918367346938,
|
| 223 |
+
"BeIR-touche2020-subset-test_cosine_recall@1": 0.01432104834093062,
|
| 224 |
+
"BeIR-touche2020-subset-test_cosine_recall@3": 0.048281100280382724,
|
| 225 |
+
"BeIR-touche2020-subset-test_cosine_recall@5": 0.07113270115683268,
|
| 226 |
+
"BeIR-touche2020-subset-test_cosine_recall@10": 0.1277462197359846,
|
| 227 |
+
"BeIR-touche2020-subset-test_cosine_ndcg@10": 0.6075766364842123,
|
| 228 |
+
"BeIR-touche2020-subset-test_cosine_mrr@10": 0.7950680272108844,
|
| 229 |
+
"BeIR-touche2020-subset-test_cosine_map@100": 0.25739036878474303
|
| 230 |
}
|
| 231 |
}
|
model.safetensors
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:0e66bd1a9ef557e4d2c8b9b0ed8d2be4c7ef70fbeeb03cde61cd58c4f4bc8351
+size 596070136

modules.json
CHANGED

@@ -10,11 +10,5 @@
     "name": "1",
     "path": "1_Pooling",
     "type": "sentence_transformers.models.Pooling"
-  },
-  {
-    "idx": 2,
-    "name": "2",
-    "path": "2_Normalize",
-    "type": "sentence_transformers.models.Normalize"
   }
 ]

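One practical consequence of this change: with the Normalize module dropped from the pipeline, `encode()` no longer returns unit-length vectors by default, so dot-product users may want to request normalization explicitly. A minimal hedged sketch (the repo id is a placeholder):

```python
# Sketch: cosine similarity is unaffected, but explicit normalization
# restores unit-length embeddings now that the 2_Normalize module is gone.
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("path/or/repo-id")  # placeholder
embeddings = model.encode(["example query"], normalize_embeddings=True)
```
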
special_tokens_map.json
CHANGED

@@ -8,7 +8,7 @@
   },
   "mask_token": {
     "content": "[MASK]",
-    "lstrip": 
+    "lstrip": true,
     "normalized": false,
     "rstrip": false,
     "single_word": false

tokenizer.json
CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:218484396a9d08293b108b0e5ea31e0a1b7c801dcbed35a821deb392d2fe9bb4
+size 3583485

tokenizer_config.json
CHANGED
|
@@ -1,14 +1,230 @@
|
|
| 1 |
{
|
| 2 |
"added_tokens_decoder": {
|
| 3 |
"0": {
|
| 4 |
-
"content": "
| 5 |
"lstrip": false,
|
| 6 |
"normalized": false,
|
| 7 |
"rstrip": false,
|
| 8 |
"single_word": false,
|
| 9 |
"special": true
|
| 10 |
},
|
| 11 |
-
"
| 12 |
"content": "[UNK]",
|
| 13 |
"lstrip": false,
|
| 14 |
"normalized": false,
|
|
@@ -16,7 +232,7 @@
|
|
| 16 |
"single_word": false,
|
| 17 |
"special": true
|
| 18 |
},
|
| 19 |
-
"
|
| 20 |
"content": "[CLS]",
|
| 21 |
"lstrip": false,
|
| 22 |
"normalized": false,
|
|
@@ -24,7 +240,7 @@
|
|
| 24 |
"single_word": false,
|
| 25 |
"special": true
|
| 26 |
},
|
| 27 |
-
"
|
| 28 |
"content": "[SEP]",
|
| 29 |
"lstrip": false,
|
| 30 |
"normalized": false,
|
|
@@ -32,34 +248,698 @@
|
|
| 32 |
"single_word": false,
|
| 33 |
"special": true
|
| 34 |
},
|
| 35 |
-
"
|
| 36 |
-
"content": "[
|
| 37 |
"lstrip": false,
|
| 38 |
"normalized": false,
|
| 39 |
"rstrip": false,
|
| 40 |
"single_word": false,
|
| 41 |
"special": true
|
| 42 |
}
|
| 43 |
},
|
| 44 |
"clean_up_tokenization_spaces": true,
|
| 45 |
"cls_token": "[CLS]",
|
| 46 |
-
"do_basic_tokenize": true,
|
| 47 |
-
"do_lower_case": true,
|
| 48 |
"extra_special_tokens": {},
|
| 49 |
"mask_token": "[MASK]",
|
| 50 |
-
"
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
|
|
|
| 54 |
"pad_token": "[PAD]",
|
| 55 |
-
"pad_token_type_id": 0,
|
| 56 |
-
"padding_side": "right",
|
| 57 |
"sep_token": "[SEP]",
|
| 58 |
-
"
|
| 59 |
-
"strip_accents": null,
|
| 60 |
-
"tokenize_chinese_chars": true,
|
| 61 |
-
"tokenizer_class": "BertTokenizer",
|
| 62 |
-
"truncation_side": "right",
|
| 63 |
-
"truncation_strategy": "longest_first",
|
| 64 |
"unk_token": "[UNK]"
|
| 65 |
}
|
|
|
|
| 1 |
{
|
| 2 |
"added_tokens_decoder": {
|
| 3 |
"0": {
|
| 4 |
+
"content": "|||IP_ADDRESS|||",
|
| 5 |
+
"lstrip": false,
|
| 6 |
+
"normalized": true,
|
| 7 |
+
"rstrip": false,
|
| 8 |
+
"single_word": false,
|
| 9 |
+
"special": false
|
| 10 |
+
},
|
| 11 |
+
"1": {
|
| 12 |
+
"content": "<|padding|>",
|
| 13 |
"lstrip": false,
|
| 14 |
"normalized": false,
|
| 15 |
"rstrip": false,
|
| 16 |
"single_word": false,
|
| 17 |
"special": true
|
| 18 |
},
|
+   "50254": {
+     "content": " ",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false,
+     "special": false
+   },
+   [added file lines 27–202: entries "50255" through "50276", one whitespace token each, with the same flags as "50254" above]
+   "50277": {
+     "content": "|||EMAIL_ADDRESS|||",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false,
+     "special": false
+   },
+   "50278": {
+     "content": "|||PHONE_NUMBER|||",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false,
+     "special": false
+   },
+   "50279": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false,
+     "special": true
+   },
+   "50280": {
      "content": "[UNK]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
+   "50281": {
      "content": "[CLS]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
+   "50282": {
      "content": "[SEP]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
+   "50283": {
+     "content": "[PAD]",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
+   },
+   "50284": {
+     "content": "[MASK]",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false,
+     "special": true
+   },
+   [added file lines 267–929: entries "50285" through "50367", mapping token ids 50285–50367 to "[unused0]"–"[unused82]", each with "lstrip": false, "normalized": true, "rstrip": false, "single_word": false, "special": false]
    }
  },
  "clean_up_tokenization_spaces": true,
  "cls_token": "[CLS]",
  "extra_special_tokens": {},
  "mask_token": "[MASK]",
+ "model_input_names": [
+   "input_ids",
+   "attention_mask"
+ ],
+ "model_max_length": 1000000000000000019884624838656,
  "pad_token": "[PAD]",
  "sep_token": "[SEP]",
+ "tokenizer_class": "PreTrainedTokenizerFast",
  "unk_token": "[UNK]"
}
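With the new config the tokenizer is loaded as a PreTrainedTokenizerFast whose BERT-style special tokens are declared in the added_tokens_decoder above ([UNK]=50280, [CLS]=50281, [SEP]=50282, [PAD]=50283, [MASK]=50284). A minimal sketch (not part of this commit) for checking that the checkpoint resolves those ids as expected; "path/to/checkpoint" is a placeholder for a local clone or the Hub repo id:

```python
# Sketch only: load the updated tokenizer and inspect its special tokens.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/checkpoint")  # placeholder path

# Should report a fast tokenizer and the ids declared in added_tokens_decoder.
print(type(tok).__name__)
print(tok.unk_token, tok.unk_token_id)
print(tok.cls_token, tok.cls_token_id)
print(tok.sep_token, tok.sep_token_id)
print(tok.pad_token, tok.pad_token_id)
print(tok.mask_token, tok.mask_token_id)

# Sanity check: a short encode/decode round trip should wrap the text in [CLS] ... [SEP].
ids = tok("hello world")["input_ids"]
print(ids, tok.decode(ids))
```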