add left padding
README.md CHANGED
```diff
@@ -27,7 +27,7 @@ MoD-Embedding is a text embedding model designed for semantic search and retriev
 - **Max Sequence Length**: 32,768 tokens
 - **Embedding Dimension**: 2560
 - **Languages**: English, Chinese, and multilingual support
-- **Training Method**: LoRA fine-tuning
+- **Training Method**: LoRA fine-tuning
 
 ## Usage
 
@@ -61,8 +61,8 @@ from transformers import AutoModel, AutoTokenizer
 import torch
 import torch.nn.functional as F
 
-tokenizer = AutoTokenizer.from_pretrained("bflhc/MoD-Embedding")
-model = AutoModel.from_pretrained("bflhc/MoD-Embedding"
+tokenizer = AutoTokenizer.from_pretrained("bflhc/MoD-Embedding", padding_side='left')
+model = AutoModel.from_pretrained("bflhc/MoD-Embedding")
 model.eval()
 
 def encode(texts):
```
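The change adds `padding_side='left'` when loading the tokenizer and also closes the previously unbalanced `from_pretrained(...)` call. Below is a minimal sketch of why left padding typically matters for a model like this: it assumes the model pools the hidden state of the final token, which the truncated README does not confirm, and the `encode` body and example texts are illustrative rather than taken from the repository.

```python
# Hedged sketch, not the repository's code: assumes last-token pooling.
import torch
import torch.nn.functional as F
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bflhc/MoD-Embedding", padding_side="left")
model = AutoModel.from_pretrained("bflhc/MoD-Embedding")
model.eval()

def encode(texts):
    # With left padding, every sequence in the batch ends at the same position,
    # so index -1 of last_hidden_state is a real token for every row.
    # With right padding, shorter sequences would have a pad token there.
    batch = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**batch)
    embeddings = outputs.last_hidden_state[:, -1]  # last-token pooling (assumed)
    return F.normalize(embeddings, p=2, dim=-1)    # unit-length embeddings

# Illustrative usage: cosine similarity between queries and a document.
queries = ["What is semantic search?", "How long can the input be?"]
docs = ["Semantic search retrieves documents by meaning rather than keywords."]
scores = encode(queries) @ encode(docs).T
print(scores)
```

If the model instead uses mean pooling over non-pad tokens, the padding side would not affect the result; the left-padding requirement is specific to pooling strategies that read a fixed position such as the last token.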