radoslavralev committed (verified)
Commit 2d155b6 · Parent(s): 41d5685

Training in progress, step 5000
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
1_Pooling/config.json CHANGED
@@ -1,7 +1,7 @@
  {
- "word_embedding_dimension": 384,
- "pooling_mode_cls_token": false,
- "pooling_mode_mean_tokens": true,
+ "word_embedding_dimension": 512,
+ "pooling_mode_cls_token": true,
+ "pooling_mode_mean_tokens": false,
  "pooling_mode_max_tokens": false,
  "pooling_mode_mean_sqrt_len_tokens": false,
  "pooling_mode_weightedmean_tokens": false,
Information-Retrieval_evaluation_val_results.csv CHANGED
@@ -9,3 +9,4 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Precisi
  -1,-1,0.83665,0.91045,0.9361,0.83665,0.83665,0.3034833333333333,0.91045,0.18722000000000003,0.9361,0.83665,0.8753945833333286,0.8793089583333286,0.9000254411118587,0.8812821493075779
  -1,-1,0.827675,0.903,0.928425,0.827675,0.827675,0.30099999999999993,0.903,0.18568500000000004,0.928425,0.827675,0.8671804166666619,0.8711970039682481,0.8922532953454642,0.87334664003711
  -1,-1,0.83295,0.9071,0.9329,0.83295,0.83295,0.3023666666666666,0.9071,0.18658000000000005,0.9329,0.83295,0.872013749999996,0.8760916468253912,0.8970951855878305,0.8781372459990227
+ -1,-1,0.83545,0.911175,0.9366,0.83545,0.83545,0.303725,0.911175,0.18732000000000001,0.9366,0.83545,0.8751591666666616,0.8790415476190412,0.8999318372974409,0.8810239994800558
README.md CHANGED
@@ -5,123 +5,51 @@ tags:
  - feature-extraction
  - dense
  - generated_from_trainer
- - dataset_size:713743
+ - dataset_size:100000
  - loss:MultipleNegativesRankingLoss
- base_model: thenlper/gte-small
+ base_model: prajjwal1/bert-small
  widget:
- - source_sentence: 'Abraham Lincoln: Why is the Gettysburg Address so memorable?'
+ - source_sentence: How do I calculate IQ?
  sentences:
- - 'Abraham Lincoln: Why is the Gettysburg Address so memorable?'
- - What does the Gettysburg Address really mean?
- - What is eatalo.com?
- - source_sentence: Has the influence of Ancient Carthage in science, math, and society
- been underestimated?
+ - What is the easiest way to know my IQ?
+ - How do I calculate not IQ ?
+ - What are some creative and innovative business ideas with less investment in India?
+ - source_sentence: How can I learn martial arts in my home?
  sentences:
- - How does one earn money online without an investment from home?
- - Has the influence of Ancient Carthage in science, math, and society been underestimated?
- - Has the influence of the Ancient Etruscans in science and math been underestimated?
- - source_sentence: Is there any app that shares charging to others like share it how
- we transfer files?
+ - How can I learn martial arts by myself?
+ - What are the advantages and disadvantages of investing in gold?
+ - Can people see that I have looked at their pictures on instagram if I am not following
+ them?
+ - source_sentence: When Enterprise picks you up do you have to take them back?
  sentences:
- - How do you think of Chinese claims that the present Private Arbitration is illegal,
- its verdict violates the UNCLOS and is illegal?
- - Is there any app that shares charging to others like share it how we transfer
- files?
- - Are there any platforms that provides end-to-end encryption for file transfer/
- sharing?
- - source_sentence: Why AAP’s MLA Dinesh Mohaniya has been arrested?
+ - Are there any software Training institute in Tuticorin?
+ - When Enterprise picks you up do you have to take them back?
+ - When Enterprise picks you up do them have to take youback?
+ - source_sentence: What are some non-capital goods?
  sentences:
- - What are your views on the latest sex scandal by AAP MLA Sandeep Kumar?
- - What is a dc current? What are some examples?
- - Why AAP’s MLA Dinesh Mohaniya has been arrested?
- - source_sentence: What is the difference between economic growth and economic development?
+ - What are capital goods?
+ - How is the value of [math]\pi[/math] calculated?
+ - What are some non-capital goods?
+ - source_sentence: What is the QuickBooks technical support phone number in New York?
  sentences:
- - How cold can the Gobi Desert get, and how do its average temperatures compare
- to the ones in the Simpson Desert?
- - the difference between economic growth and economic development is What?
- - What is the difference between economic growth and economic development?
+ - What caused the Great Depression?
+ - Can I apply for PR in Canada?
+ - Which is the best QuickBooks Hosting Support Number in New York?
  pipeline_tag: sentence-similarity
  library_name: sentence-transformers
- metrics:
- - cosine_accuracy@1
- - cosine_accuracy@3
- - cosine_accuracy@5
- - cosine_precision@1
- - cosine_precision@3
- - cosine_precision@5
- - cosine_recall@1
- - cosine_recall@3
- - cosine_recall@5
- - cosine_ndcg@10
- - cosine_mrr@1
- - cosine_mrr@5
- - cosine_mrr@10
- - cosine_map@100
- model-index:
- - name: SentenceTransformer based on thenlper/gte-small
- results:
- - task:
- type: information-retrieval
- name: Information Retrieval
- dataset:
- name: val
- type: val
- metrics:
- - type: cosine_accuracy@1
- value: 0.83545
- name: Cosine Accuracy@1
- - type: cosine_accuracy@3
- value: 0.911175
- name: Cosine Accuracy@3
- - type: cosine_accuracy@5
- value: 0.9366
- name: Cosine Accuracy@5
- - type: cosine_precision@1
- value: 0.83545
- name: Cosine Precision@1
- - type: cosine_precision@3
- value: 0.303725
- name: Cosine Precision@3
- - type: cosine_precision@5
- value: 0.18732000000000001
- name: Cosine Precision@5
- - type: cosine_recall@1
- value: 0.83545
- name: Cosine Recall@1
- - type: cosine_recall@3
- value: 0.911175
- name: Cosine Recall@3
- - type: cosine_recall@5
- value: 0.9366
- name: Cosine Recall@5
- - type: cosine_ndcg@10
- value: 0.8999318372974409
- name: Cosine Ndcg@10
- - type: cosine_mrr@1
- value: 0.83545
- name: Cosine Mrr@1
- - type: cosine_mrr@5
- value: 0.8751591666666616
- name: Cosine Mrr@5
- - type: cosine_mrr@10
- value: 0.8790415476190412
- name: Cosine Mrr@10
- - type: cosine_map@100
- value: 0.8810239994800558
- name: Cosine Map@100
  ---
 
- # SentenceTransformer based on thenlper/gte-small
+ # SentenceTransformer based on prajjwal1/bert-small
 
- This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [thenlper/gte-small](https://huggingface.co/thenlper/gte-small). It maps sentences & paragraphs to a 384-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
+ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [prajjwal1/bert-small](https://huggingface.co/prajjwal1/bert-small). It maps sentences & paragraphs to a 512-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
 
  ## Model Details
 
  ### Model Description
  - **Model Type:** Sentence Transformer
- - **Base model:** [thenlper/gte-small](https://huggingface.co/thenlper/gte-small) <!-- at revision 17e1f347d17fe144873b1201da91788898c639cd -->
+ - **Base model:** [prajjwal1/bert-small](https://huggingface.co/prajjwal1/bert-small) <!-- at revision 0ec5f86f27c1a77d704439db5e01c307ea11b9d4 -->
  - **Maximum Sequence Length:** 128 tokens
- - **Output Dimensionality:** 384 dimensions
+ - **Output Dimensionality:** 512 dimensions
  - **Similarity Function:** Cosine Similarity
  <!-- - **Training Dataset:** Unknown -->
  <!-- - **Language:** Unknown -->
@@ -138,8 +66,7 @@ This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [t
  ```
  SentenceTransformer(
  (0): Transformer({'max_seq_length': 128, 'do_lower_case': False, 'architecture': 'BertModel'})
- (1): Pooling({'word_embedding_dimension': 384, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
- (2): Normalize()
+ (1): Pooling({'word_embedding_dimension': 512, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
  )
  ```
 
@@ -158,23 +85,23 @@ Then you can load this model and run inference.
  from sentence_transformers import SentenceTransformer
 
  # Download from the 🤗 Hub
- model = SentenceTransformer("redis/model-b-structured")
+ model = SentenceTransformer("sentence_transformers_model_id")
  # Run inference
  sentences = [
- 'What is the difference between economic growth and economic development?',
- 'What is the difference between economic growth and economic development?',
- 'the difference between economic growth and economic development is What?',
+ 'What is the QuickBooks technical support phone number in New York?',
+ 'Which is the best QuickBooks Hosting Support Number in New York?',
+ 'Can I apply for PR in Canada?',
  ]
  embeddings = model.encode(sentences)
  print(embeddings.shape)
- # [3, 384]
+ # [3, 512]
 
  # Get the similarity scores for the embeddings
  similarities = model.similarity(embeddings, embeddings)
  print(similarities)
- # tensor([[ 1.0000, 1.0000, -0.0794],
- # [ 1.0000, 1.0000, -0.0794],
- # [-0.0794, -0.0794, 1.0000]])
+ # tensor([[1.0000, 0.8563, 0.0594],
+ # [0.8563, 1.0000, 0.1245],
+ # [0.0594, 0.1245, 1.0000]])
  ```
 
  <!--
@@ -201,32 +128,6 @@ You can finetune this model on your own dataset.
  *List how the model may foreseeably be misused and address what users ought not to do with the model.*
  -->
 
- ## Evaluation
-
- ### Metrics
-
- #### Information Retrieval
-
- * Dataset: `val`
- * Evaluated with [<code>InformationRetrievalEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.InformationRetrievalEvaluator)
-
- | Metric | Value |
- |:-------------------|:-----------|
- | cosine_accuracy@1 | 0.8355 |
- | cosine_accuracy@3 | 0.9112 |
- | cosine_accuracy@5 | 0.9366 |
- | cosine_precision@1 | 0.8355 |
- | cosine_precision@3 | 0.3037 |
- | cosine_precision@5 | 0.1873 |
- | cosine_recall@1 | 0.8355 |
- | cosine_recall@3 | 0.9112 |
- | cosine_recall@5 | 0.9366 |
- | **cosine_ndcg@10** | **0.8999** |
- | cosine_mrr@1 | 0.8355 |
- | cosine_mrr@5 | 0.8752 |
- | cosine_mrr@10 | 0.879 |
- | cosine_map@100 | 0.881 |
-
  <!--
  ## Bias, Risks and Limitations
 
@@ -245,49 +146,23 @@ You can finetune this model on your own dataset.
 
  #### Unnamed Dataset
 
- * Size: 713,743 training samples
- * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
+ * Size: 100,000 training samples
+ * Columns: <code>sentence_0</code>, <code>sentence_1</code>, and <code>sentence_2</code>
  * Approximate statistics based on the first 1000 samples:
- | | anchor | positive | negative |
+ | | sentence_0 | sentence_1 | sentence_2 |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type | string | string | string |
- | details | <ul><li>min: 6 tokens</li><li>mean: 16.07 tokens</li><li>max: 53 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.03 tokens</li><li>max: 53 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.81 tokens</li><li>max: 58 tokens</li></ul> |
+ | details | <ul><li>min: 6 tokens</li><li>mean: 15.79 tokens</li><li>max: 66 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.68 tokens</li><li>max: 66 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 16.37 tokens</li><li>max: 67 tokens</li></ul> |
  * Samples:
- | anchor | positive | negative |
- |:-------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------|
- | <code>Which one is better Linux OS? Ubuntu or Mint?</code> | <code>Why do you use Linux Mint?</code> | <code>Which one is not better Linux OS ? Ubuntu or Mint ?</code> |
- | <code>What is flow?</code> | <code>What is flow?</code> | <code>What are flow lines?</code> |
- | <code>How is Trump planning to get Mexico to pay for his supposed wall?</code> | <code>How is it possible for Donald Trump to force Mexico to pay for the wall?</code> | <code>Why do we connect the positive terminal before the negative terminal to ground in a vehicle battery?</code> |
+ | sentence_0 | sentence_1 | sentence_2 |
+ |:-----------------------------------------------------------------|:-----------------------------------------------------------------|:----------------------------------------------------------------------------------|
+ | <code>Is masturbating bad for boys?</code> | <code>Is masturbating bad for boys?</code> | <code>How harmful or unhealthy is masturbation?</code> |
+ | <code>Does a train engine move in reverse?</code> | <code>Does a train engine move in reverse?</code> | <code>Time moves forward, not in reverse. Doesn't that make time a vector?</code> |
+ | <code>What is the most badass thing anyone has ever done?</code> | <code>What is the most badass thing anyone has ever done?</code> | <code>anyone is the most badass thing Whathas ever done?</code> |
  * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
  ```json
  {
- "scale": 7.0,
- "similarity_fct": "cos_sim",
- "gather_across_devices": false
- }
- ```
-
- ### Evaluation Dataset
-
- #### Unnamed Dataset
-
- * Size: 40,000 evaluation samples
- * Columns: <code>anchor</code>, <code>positive</code>, and <code>negative</code>
- * Approximate statistics based on the first 1000 samples:
- | | anchor | positive | negative |
- |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
- | type | string | string | string |
- | details | <ul><li>min: 6 tokens</li><li>mean: 15.52 tokens</li><li>max: 74 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 15.51 tokens</li><li>max: 74 tokens</li></ul> | <ul><li>min: 6 tokens</li><li>mean: 16.79 tokens</li><li>max: 69 tokens</li></ul> |
- * Samples:
- | anchor | positive | negative |
- |:-------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|
- | <code>Why are all my questions on Quora marked needing improvement?</code> | <code>Why are all my questions immediately being marked as needing improvement?</code> | <code>For a post-graduate student in IIT, is it allowed to take an external scholarship as a top-up to his/her MHRD assistantship?</code> |
- | <code>Can blue butter fly needle with vaccum tube be reused? Is it HIV risk? . Heard the needle is too small to be reused . Had blood draw at clinic?</code> | <code>Can blue butter fly needle with vaccum tube be reused? Is it HIV risk? . Heard the needle is too small to be reused . Had blood draw at clinic?</code> | <code>Can blue butter fly needle with vaccum tube be reused not ? Is it HIV risk ? . Heard the needle is too small to be reused . Had blood draw at clinic ?</code> |
- | <code>Why do people still believe the world is flat?</code> | <code>Why are there still people who believe the world is flat?</code> | <code>I'm not able to buy Udemy course .it is not accepting mine and my friends debit card.my card can be used for Flipkart .how to purchase now?</code> |
- * Loss: [<code>MultipleNegativesRankingLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#multiplenegativesrankingloss) with these parameters:
- ```json
- {
- "scale": 7.0,
+ "scale": 20.0,
  "similarity_fct": "cos_sim",
  "gather_across_devices": false
  }
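The hunk above replaces `scale: 7.0` with `scale: 20.0`, which is the library default for this loss. A minimal sketch of how such a loss is wired up, assuming `sentence-transformers` is installed; the base model here is only a placeholder:

```python
from sentence_transformers import SentenceTransformer, losses, util

model = SentenceTransformer("prajjwal1/bert-small")  # placeholder base model

# scale multiplies the cosine similarities before the softmax over
# in-batch negatives; larger values sharpen the distribution.
loss = losses.MultipleNegativesRankingLoss(
    model,
    scale=20.0,
    similarity_fct=util.cos_sim,
)
```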
@@ -296,49 +171,36 @@ You can finetune this model on your own dataset.
  ### Training Hyperparameters
  #### Non-Default Hyperparameters
 
- - `eval_strategy`: steps
- - `per_device_train_batch_size`: 128
- - `per_device_eval_batch_size`: 128
- - `learning_rate`: 0.0002
- - `weight_decay`: 0.0001
- - `max_steps`: 10000
- - `warmup_ratio`: 0.1
+ - `per_device_train_batch_size`: 64
+ - `per_device_eval_batch_size`: 64
  - `fp16`: True
- - `dataloader_drop_last`: True
- - `dataloader_num_workers`: 1
- - `dataloader_prefetch_factor`: 1
- - `load_best_model_at_end`: True
- - `optim`: adamw_torch
- - `ddp_find_unused_parameters`: False
- - `push_to_hub`: True
- - `hub_model_id`: redis/model-b-structured
- - `eval_on_start`: True
+ - `multi_dataset_batch_sampler`: round_robin
 
  #### All Hyperparameters
  <details><summary>Click to expand</summary>
 
  - `overwrite_output_dir`: False
  - `do_predict`: False
- - `eval_strategy`: steps
+ - `eval_strategy`: no
  - `prediction_loss_only`: True
- - `per_device_train_batch_size`: 128
- - `per_device_eval_batch_size`: 128
+ - `per_device_train_batch_size`: 64
+ - `per_device_eval_batch_size`: 64
  - `per_gpu_train_batch_size`: None
  - `per_gpu_eval_batch_size`: None
  - `gradient_accumulation_steps`: 1
  - `eval_accumulation_steps`: None
  - `torch_empty_cache_steps`: None
- - `learning_rate`: 0.0002
- - `weight_decay`: 0.0001
+ - `learning_rate`: 5e-05
+ - `weight_decay`: 0.0
  - `adam_beta1`: 0.9
  - `adam_beta2`: 0.999
  - `adam_epsilon`: 1e-08
- - `max_grad_norm`: 1.0
- - `num_train_epochs`: 3.0
- - `max_steps`: 10000
+ - `max_grad_norm`: 1
+ - `num_train_epochs`: 3
+ - `max_steps`: -1
  - `lr_scheduler_type`: linear
  - `lr_scheduler_kwargs`: {}
- - `warmup_ratio`: 0.1
+ - `warmup_ratio`: 0.0
  - `warmup_steps`: 0
  - `log_level`: passive
  - `log_level_replica`: warning
@@ -366,14 +228,14 @@ You can finetune this model on your own dataset.
  - `tpu_num_cores`: None
  - `tpu_metrics_debug`: False
  - `debug`: []
- - `dataloader_drop_last`: True
- - `dataloader_num_workers`: 1
- - `dataloader_prefetch_factor`: 1
+ - `dataloader_drop_last`: False
+ - `dataloader_num_workers`: 0
+ - `dataloader_prefetch_factor`: None
  - `past_index`: -1
  - `disable_tqdm`: False
  - `remove_unused_columns`: True
  - `label_names`: None
- - `load_best_model_at_end`: True
+ - `load_best_model_at_end`: False
  - `ignore_data_skip`: False
  - `fsdp`: []
  - `fsdp_min_num_params`: 0
@@ -383,23 +245,23 @@ You can finetune this model on your own dataset.
  - `parallelism_config`: None
  - `deepspeed`: None
  - `label_smoothing_factor`: 0.0
- - `optim`: adamw_torch
+ - `optim`: adamw_torch_fused
  - `optim_args`: None
  - `adafactor`: False
  - `group_by_length`: False
  - `length_column_name`: length
  - `project`: huggingface
  - `trackio_space_id`: trackio
- - `ddp_find_unused_parameters`: False
+ - `ddp_find_unused_parameters`: None
  - `ddp_bucket_cap_mb`: None
  - `ddp_broadcast_buffers`: False
  - `dataloader_pin_memory`: True
  - `dataloader_persistent_workers`: False
  - `skip_memory_metrics`: True
  - `use_legacy_prediction_loop`: False
- - `push_to_hub`: True
+ - `push_to_hub`: False
  - `resume_from_checkpoint`: None
- - `hub_model_id`: redis/model-b-structured
+ - `hub_model_id`: None
  - `hub_strategy`: every_save
  - `hub_private_repo`: None
  - `hub_always_push`: False
@@ -426,65 +288,32 @@ You can finetune this model on your own dataset.
  - `neftune_noise_alpha`: None
  - `optim_target_modules`: None
  - `batch_eval_metrics`: False
- - `eval_on_start`: True
+ - `eval_on_start`: False
  - `use_liger_kernel`: False
  - `liger_kernel_config`: None
  - `eval_use_gather_object`: False
  - `average_tokens_across_devices`: True
  - `prompts`: None
  - `batch_sampler`: batch_sampler
- - `multi_dataset_batch_sampler`: proportional
+ - `multi_dataset_batch_sampler`: round_robin
  - `router_mapping`: {}
  - `learning_rate_mapping`: {}
 
  </details>
 
  ### Training Logs
- | Epoch | Step | Training Loss | Validation Loss | val_cosine_ndcg@10 |
- |:----------:|:---------:|:-------------:|:---------------:|:------------------:|
- | 0 | 0 | - | 3.6810 | 0.8566 |
- | 0.0448 | 250 | 1.5797 | 0.4480 | 0.8864 |
- | 0.0897 | 500 | 0.5396 | 0.4082 | 0.8901 |
- | 0.1345 | 750 | 0.4931 | 0.3876 | 0.8887 |
- | 0.1793 | 1000 | 0.4761 | 0.3822 | 0.8888 |
- | 0.2242 | 1250 | 0.462 | 0.3777 | 0.8899 |
- | 0.2690 | 1500 | 0.4452 | 0.3683 | 0.8896 |
- | 0.3138 | 1750 | 0.4356 | 0.3579 | 0.8899 |
- | 0.3587 | 2000 | 0.4303 | 0.3553 | 0.8902 |
- | 0.4035 | 2250 | 0.4176 | 0.3492 | 0.8915 |
- | 0.4484 | 2500 | 0.4118 | 0.3459 | 0.8918 |
- | 0.4932 | 2750 | 0.4082 | 0.3437 | 0.8929 |
- | 0.5380 | 3000 | 0.4017 | 0.3413 | 0.8930 |
- | 0.5829 | 3250 | 0.3987 | 0.3380 | 0.8930 |
- | 0.6277 | 3500 | 0.3955 | 0.3355 | 0.8945 |
- | 0.6725 | 3750 | 0.3899 | 0.3324 | 0.8945 |
- | 0.7174 | 4000 | 0.3885 | 0.3307 | 0.8943 |
- | 0.7622 | 4250 | 0.3852 | 0.3272 | 0.8944 |
- | 0.8070 | 4500 | 0.3798 | 0.3276 | 0.8952 |
- | 0.8519 | 4750 | 0.3791 | 0.3240 | 0.8958 |
- | 0.8967 | 5000 | 0.3762 | 0.3230 | 0.8962 |
- | 0.9415 | 5250 | 0.3744 | 0.3209 | 0.8966 |
- | 0.9864 | 5500 | 0.3706 | 0.3193 | 0.8962 |
- | 1.0312 | 5750 | 0.3591 | 0.3164 | 0.8964 |
- | 1.0760 | 6000 | 0.3541 | 0.3158 | 0.8970 |
- | 1.1209 | 6250 | 0.3531 | 0.3132 | 0.8968 |
- | 1.1657 | 6500 | 0.3516 | 0.3129 | 0.8974 |
- | 1.2105 | 6750 | 0.3511 | 0.3108 | 0.8973 |
- | 1.2554 | 7000 | 0.3494 | 0.3098 | 0.8975 |
- | 1.3002 | 7250 | 0.35 | 0.3086 | 0.8976 |
- | 1.3451 | 7500 | 0.3458 | 0.3081 | 0.8983 |
- | 1.3899 | 7750 | 0.3453 | 0.3072 | 0.8980 |
- | 1.4347 | 8000 | 0.3426 | 0.3066 | 0.8984 |
- | 1.4796 | 8250 | 0.3427 | 0.3042 | 0.8987 |
- | 1.5244 | 8500 | 0.342 | 0.3046 | 0.8992 |
- | 1.5692 | 8750 | 0.3404 | 0.3037 | 0.8994 |
- | 1.6141 | 9000 | 0.339 | 0.3027 | 0.8996 |
- | 1.6589 | 9250 | 0.3392 | 0.3015 | 0.8996 |
- | 1.7037 | 9500 | 0.3377 | 0.3012 | 0.8999 |
- | 1.7486 | 9750 | 0.3391 | 0.3007 | 0.8999 |
- | **1.7934** | **10000** | **0.3365** | **0.3004** | **0.8999** |
-
- * The bold row denotes the saved checkpoint.
+ | Epoch | Step | Training Loss |
+ |:------:|:----:|:-------------:|
+ | 0.3199 | 500 | 0.4294 |
+ | 0.6398 | 1000 | 0.1268 |
+ | 0.9597 | 1500 | 0.1 |
+ | 1.2796 | 2000 | 0.0792 |
+ | 1.5995 | 2500 | 0.0706 |
+ | 1.9194 | 3000 | 0.0687 |
+ | 2.2393 | 3500 | 0.0584 |
+ | 2.5592 | 4000 | 0.057 |
+ | 2.8791 | 4500 | 0.0581 |
+
 
  ### Framework Versions
  - Python: 3.10.18
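For context, a hedged sketch of how the non-default hyperparameters above would be expressed with the sentence-transformers v3+ trainer API; the output directory is a placeholder:

```python
from sentence_transformers import SentenceTransformerTrainingArguments

args = SentenceTransformerTrainingArguments(
    output_dir="output",  # placeholder
    per_device_train_batch_size=64,
    per_device_eval_batch_size=64,
    num_train_epochs=3,
    fp16=True,
    multi_dataset_batch_sampler="round_robin",
)
```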
config.json CHANGED
@@ -1,24 +1,60 @@
  {
+ "_sliding_window_pattern": 6,
  "architectures": [
- "BertModel"
+ "Gemma3TextModel"
  ],
- "attention_probs_dropout_prob": 0.1,
- "classifier_dropout": null,
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "attn_logit_softcapping": null,
+ "bos_token_id": 2,
  "dtype": "float32",
- "hidden_act": "gelu",
- "hidden_dropout_prob": 0.1,
- "hidden_size": 384,
+ "eos_token_id": 1,
+ "final_logit_softcapping": null,
+ "head_dim": 256,
+ "hidden_activation": "gelu_pytorch_tanh",
+ "hidden_size": 768,
  "initializer_range": 0.02,
- "intermediate_size": 1536,
- "layer_norm_eps": 1e-12,
- "max_position_embeddings": 512,
- "model_type": "bert",
- "num_attention_heads": 12,
- "num_hidden_layers": 12,
+ "intermediate_size": 1152,
+ "layer_types": [
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "sliding_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 2048,
+ "model_type": "gemma3_text",
+ "num_attention_heads": 3,
+ "num_hidden_layers": 24,
+ "num_key_value_heads": 1,
  "pad_token_id": 0,
- "position_embedding_type": "absolute",
+ "query_pre_attn_scalar": 256,
+ "rms_norm_eps": 1e-06,
+ "rope_local_base_freq": 10000.0,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": 257,
  "transformers_version": "4.57.3",
- "type_vocab_size": 2,
+ "use_bidirectional_attention": true,
  "use_cache": true,
- "vocab_size": 30522
+ "vocab_size": 262144
  }
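The new side of this hunk describes a Gemma3 text encoder rather than the BERT config it replaces. A minimal sketch for inspecting it, assuming a `transformers` release recent enough to know the `gemma3_text` model type; the local path is hypothetical:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint-5000")  # hypothetical local clone
print(config.model_type)         # gemma3_text
print(config.hidden_size)        # 768
print(config.num_hidden_layers)  # 24
print(config.vocab_size)         # 262144
```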
eval/Information-Retrieval_evaluation_val_results.csv CHANGED
@@ -637,3 +637,24 @@ epoch,steps,cosine-Accuracy@1,cosine-Accuracy@3,cosine-Accuracy@5,cosine-Precisi
  1.7037302725968435,9500,0.83535,0.911475,0.93655,0.83535,0.83535,0.3038249999999999,0.911475,0.18731,0.93655,0.83535,0.8751141666666615,0.878991577380946,0.899889550569265,0.8809631243794993
  1.7485652797704447,9750,0.835325,0.9112,0.936325,0.835325,0.835325,0.3037333333333333,0.9112,0.18726500000000001,0.936325,0.835325,0.8750524999999947,0.8789839384920574,0.8998852971689221,0.8809643466594432
  1.793400286944046,10000,0.83545,0.911175,0.9366,0.83545,0.83545,0.303725,0.911175,0.18732000000000001,0.9366,0.83545,0.8751591666666616,0.8790415476190412,0.8999318372974409,0.8810239994800558
+ 0,0,0.639675,0.87685,0.9083,0.639675,0.639675,0.2922833333333333,0.87685,0.18166000000000004,0.9083,0.639675,0.7599624999999923,0.7643178273809481,0.8083956818551257,0.7670522600703508
+ 0.022417503586800575,250,0.82065,0.89945,0.9242,0.82065,0.82065,0.2998166666666666,0.89945,0.18483999999999998,0.9242,0.82065,0.8617137499999968,0.8657987599206315,0.8872724349019027,0.8680466843836286
+ 0.04483500717360115,500,0.81655,0.892425,0.91875,0.81655,0.81655,0.29747499999999993,0.892425,0.18375000000000002,0.91875,0.81655,0.8566166666666617,0.8607229662698371,0.882122156254855,0.863151001734206
+ 0.06725251076040172,750,0.8135,0.892075,0.917625,0.8135,0.8135,0.2973583333333333,0.892075,0.18352500000000005,0.917625,0.8135,0.8543795833333286,0.8584948015872975,0.8802034869577888,0.8609313205301325
+ 0.0896700143472023,1000,0.8244,0.898125,0.9234,0.8244,0.8244,0.299375,0.898125,0.18468000000000004,0.9234,0.8244,0.8632404166666602,0.8673072817460247,0.8881903001430266,0.8695682867122491
+ 0.11208751793400287,1250,0.825125,0.8975,0.92345,0.825125,0.825125,0.2991666666666666,0.8975,0.18469000000000002,0.92345,0.825125,0.8635195833333286,0.8675001488095195,0.8881479223773198,0.8698036797223343
+ 0.13450502152080343,1500,0.824825,0.89745,0.924675,0.824825,0.824825,0.29914999999999997,0.89745,0.18493500000000002,0.924675,0.824825,0.8635674999999957,0.867597232142853,0.8885981237608775,0.8699162638314769
+ 0.15692252510760402,1750,0.828125,0.9003,0.926725,0.828125,0.828125,0.3001,0.9003,0.18534500000000004,0.926725,0.828125,0.8663654166666617,0.8703199404761844,0.8910206909869869,0.8725571487400507
+ 0.1793400286944046,2000,0.001325,0.0084,0.022275,0.001325,0.001325,0.0028,0.0084,0.004455,0.022275,0.001325,0.007867083333333311,0.010877668650793688,0.0186950894860071,0.02490441055844739
+ 0.20175753228120516,2250,0.830025,0.902425,0.92865,0.830025,0.830025,0.3008083333333333,0.902425,0.18573,0.92865,0.830025,0.868462916666662,0.8723177777777715,0.8928818077159327,0.8744904419955204
+ 0.22417503586800575,2500,0.83385,0.905775,0.931825,0.83385,0.83385,0.30192499999999994,0.905775,0.18636500000000006,0.931825,0.83385,0.871977083333331,0.8756928670634883,0.8959559117645085,0.8778776795620055
+ 0.2465925394548063,2750,0.0001,0.0752,0.174675,0.0001,0.0001,0.025066666666666664,0.0752,0.034935,0.174675,0.0001,0.05248083333333305,0.055306488095238034,0.08872108035834274,0.06968419084794351
+ 0.26901004304160686,3000,2.5e-05,0.072325,0.142725,2.5e-05,2.5e-05,0.024108333333333332,0.072325,0.028544999999999997,0.142725,2.5e-05,0.04569916666666664,0.05269701388888883,0.08544234767952956,0.06723497471913427
+ 0.2914275466284075,3250,2.5e-05,0.072325,0.142725,2.5e-05,2.5e-05,0.024108333333333332,0.072325,0.028544999999999997,0.142725,2.5e-05,0.04569916666666664,0.05269701388888883,0.08544234767952956,0.06723497471913427
+ 0.31384505021520803,3500,2.5e-05,0.072325,0.142725,2.5e-05,2.5e-05,0.024108333333333332,0.072325,0.028544999999999997,0.142725,2.5e-05,0.04569916666666664,0.05269701388888883,0.08544234767952956,0.06723497471913427
+ 0.3362625538020086,3750,2.5e-05,0.072325,0.142725,2.5e-05,2.5e-05,0.024108333333333332,0.072325,0.028544999999999997,0.142725,2.5e-05,0.04569916666666664,0.05269701388888883,0.08544234767952956,0.06723497471913427
+ 0.3586800573888092,4000,2.5e-05,0.072325,0.142725,2.5e-05,2.5e-05,0.024108333333333332,0.072325,0.028544999999999997,0.142725,2.5e-05,0.04569916666666664,0.05269701388888883,0.08544234767952956,0.06723497471913427
+ 0.38109756097560976,4250,0.0,2.5e-05,7.5e-05,0.0,0.0,8.333333333333332e-06,2.5e-05,1.5000000000000002e-05,7.5e-05,0.0,2.3749999999999998e-05,2.6875e-05,4.409809989162802e-05,5.524355841383588e-05
+ 0.4035150645624103,4500,0.0,2.5e-05,7.5e-05,0.0,0.0,8.333333333333332e-06,2.5e-05,1.5000000000000002e-05,7.5e-05,0.0,2.3749999999999998e-05,2.6875e-05,4.409809989162802e-05,5.524355841383588e-05
+ 0.4259325681492109,4750,0.0,2.5e-05,7.5e-05,0.0,0.0,8.333333333333332e-06,2.5e-05,1.5000000000000002e-05,7.5e-05,0.0,2.3749999999999998e-05,2.6875e-05,4.409809989162802e-05,5.524355841383588e-05
+ 0.4483500717360115,5000,0.0,2.5e-05,7.5e-05,0.0,0.0,8.333333333333332e-06,2.5e-05,1.5000000000000002e-05,7.5e-05,0.0,2.3749999999999998e-05,2.6875e-05,4.409809989162802e-05,5.524355841383588e-05
final_metrics.json CHANGED
@@ -1,16 +1,16 @@
  {
- "val_cosine_accuracy@1": 0.83295,
- "val_cosine_accuracy@3": 0.9071,
- "val_cosine_accuracy@5": 0.9329,
- "val_cosine_precision@1": 0.83295,
- "val_cosine_precision@3": 0.3023666666666666,
- "val_cosine_precision@5": 0.18658000000000005,
- "val_cosine_recall@1": 0.83295,
- "val_cosine_recall@3": 0.9071,
- "val_cosine_recall@5": 0.9329,
- "val_cosine_ndcg@10": 0.8970951855878305,
- "val_cosine_mrr@1": 0.83295,
- "val_cosine_mrr@5": 0.872013749999996,
- "val_cosine_mrr@10": 0.8760916468253912,
- "val_cosine_map@100": 0.8781372459990227
+ "val_cosine_accuracy@1": 0.83545,
+ "val_cosine_accuracy@3": 0.911175,
+ "val_cosine_accuracy@5": 0.9366,
+ "val_cosine_precision@1": 0.83545,
+ "val_cosine_precision@3": 0.303725,
+ "val_cosine_precision@5": 0.18732000000000001,
+ "val_cosine_recall@1": 0.83545,
+ "val_cosine_recall@3": 0.911175,
+ "val_cosine_recall@5": 0.9366,
+ "val_cosine_ndcg@10": 0.8999318372974409,
+ "val_cosine_mrr@1": 0.83545,
+ "val_cosine_mrr@5": 0.8751591666666616,
+ "val_cosine_mrr@10": 0.8790415476190412,
+ "val_cosine_map@100": 0.8810239994800558
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:bb34c96d597d832aa5edd5352a2499b5d5c6df76a439f4cead08dce5b23e26ed
- size 133462128
+ oid sha256:545b247747446f3974b25dce1c7156ba3f4d45cf47a42bc853d38de5dc798cfe
+ size 1211486072
modules.json CHANGED
@@ -10,11 +10,5 @@
  "name": "1",
  "path": "1_Pooling",
  "type": "sentence_transformers.models.Pooling"
- },
- {
- "idx": 2,
- "name": "2",
- "path": "2_Normalize",
- "type": "sentence_transformers.models.Normalize"
  }
  ]
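With the `Normalize` module dropped, `model.encode` no longer returns unit-length vectors by default, so dot product and cosine similarity stop coinciding. A minimal sketch of restoring that at encode time, assuming `sentence-transformers` and `numpy` are installed; the path is hypothetical:

```python
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("./checkpoint-5000")  # hypothetical local clone

# normalize_embeddings=True L2-normalizes outputs even without a
# Normalize module in modules.json.
emb = model.encode(["How do I calculate IQ?"], normalize_embeddings=True)
print(np.linalg.norm(emb[0]))  # ~1.0
```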
special_tokens_map.json CHANGED
@@ -1,34 +1,30 @@
  {
- "cls_token": {
- "content": "[CLS]",
+ "boi_token": "<start_of_image>",
+ "bos_token": {
+ "content": "<bos>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false
  },
- "mask_token": {
- "content": "[MASK]",
+ "eoi_token": "<end_of_image>",
+ "eos_token": {
+ "content": "<eos>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false
  },
+ "image_token": "<image_soft_token>",
  "pad_token": {
- "content": "[PAD]",
- "lstrip": false,
- "normalized": false,
- "rstrip": false,
- "single_word": false
- },
- "sep_token": {
- "content": "[SEP]",
+ "content": "<pad>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
  "single_word": false
  },
  "unk_token": {
- "content": "[UNK]",
+ "content": "<unk>",
  "lstrip": false,
  "normalized": false,
  "rstrip": false,
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
The diff for this file is too large to render. See raw diff
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:aa697a5587cabb6e578299c46e1745cc69a0c459a2f99c151e144c2158d43a4e
+ oid sha256:0550728e68dcb4297a5ed369ce43e645f35ae6cb79deed45333999adba8e4c4e
  size 6161