Update modeling_c2llm.py
Browse files — modeling_c2llm.py: +2 −2
modeling_c2llm.py
CHANGED
|
@@ -242,7 +242,7 @@ class C2LLMForEmbedding(C2LLMModel):
|
|
| 242 |
qwen_cfg = Qwen2Config.from_dict(config.to_dict())
|
| 243 |
self.plm_model = AutoModelForCausalLM.from_config(qwen_cfg)
|
| 244 |
self.embedding_method = config.embedding_method
|
| 245 |
-        self.inf_seq_length = [old value truncated in extraction]
|
| 246 |
self.padding_side = config.padding_side
|
| 247 |
|
| 248 |
self.emb_dim = self.plm_model.model.embed_tokens.weight.size(1)
|
|
@@ -465,7 +465,7 @@ class C2LLMForEmbedding(C2LLMModel):
|
|
| 465 |
convert_to_numpy: bool = False,
|
| 466 |
convert_to_tensor: bool = True,
|
| 467 |
show_progress_bar: bool = True,
|
| 468 |
-        max_seq_length: int = [old default truncated in extraction]
|
| 469 |
device: Optional[str] = None,
|
| 470 |
**kwargs: Any
|
| 471 |
):
|
|
|
|
| 242 |
qwen_cfg = Qwen2Config.from_dict(config.to_dict())
|
| 243 |
self.plm_model = AutoModelForCausalLM.from_config(qwen_cfg)
|
| 244 |
self.embedding_method = config.embedding_method
|
| 245 |
+        self.inf_seq_length = 2048
|
| 246 |
self.padding_side = config.padding_side
|
| 247 |
|
| 248 |
self.emb_dim = self.plm_model.model.embed_tokens.weight.size(1)
|
|
|
|
| 465 |
convert_to_numpy: bool = False,
|
| 466 |
convert_to_tensor: bool = True,
|
| 467 |
show_progress_bar: bool = True,
|
| 468 |
+        max_seq_length: int = 2048,
|
| 469 |
device: Optional[str] = None,
|
| 470 |
**kwargs: Any
|
| 471 |
):
|