Update modeling_super_linear.py (+5 −4)

Changed file: modeling_super_linear.py
Summary of the diff below: the unconditional `fourier_interp_dim1` call is now gated on `x_enc.shape[1] < 512` (only short inputs are interpolated), and the backbone call is wrapped with RevIN — `revin_layer(x_enc, 'norm')` before `self.backbone(x_enc)` and `revin_layer(preds, 'denorm')` after — before returning the `CausalLMOutputWithCrossAttentions`.
|
@@ -614,14 +614,15 @@ class SuperLinearForCausalLM(PreTrainedModel, GenerationMixin):
|
|
| 614 |
|
| 615 |
|
| 616 |
|
| 617 |
-
|
| 618 |
-
x_enc = self.fourier_interp_dim1(x_enc)
|
| 619 |
-
|
| 620 |
|
| 621 |
|
| 622 |
# backbone returns (B, pred_len, C)
|
|
|
|
| 623 |
preds = self.backbone(x_enc)
|
| 624 |
-
|
| 625 |
return CausalLMOutputWithCrossAttentions(loss=None,logits=preds,past_key_values=None,hidden_states=None,attentions=None,)
|
| 626 |
|
| 627 |
|
|
|
|
| 614 |
|
| 615 |
|
| 616 |
|
| 617 |
+
if x_enc.shape[1] < 512:
|
| 618 |
+
x_enc = self.fourier_interp_dim1(x_enc)
|
| 619 |
+
x_enc = self.revin_layer(x_enc, 'norm')
|
| 620 |
|
| 621 |
|
| 622 |
# backbone returns (B, pred_len, C)
|
| 623 |
+
|
| 624 |
preds = self.backbone(x_enc)
|
| 625 |
+
preds = self.revin_layer(preds, 'denorm')
|
| 626 |
return CausalLMOutputWithCrossAttentions(loss=None,logits=preds,past_key_values=None,hidden_states=None,attentions=None,)
|
| 627 |
|
| 628 |
|