Commit 69b267c

remove useless code
Signed-off-by: Liu, Kaixuan <[email protected]>
1 parent: 0f03d82


backends/python/server/text_embeddings_server/models/jinaBert_model.py

Lines changed: 0 additions & 6 deletions
@@ -2,11 +2,9 @@
 import math
 from torch import nn
 import torch.nn.functional as F
-from torch.nn.functional import scaled_dot_product_attention
 from pathlib import Path
 from typing import Type, List, Optional, Union, Tuple
 from transformers import AutoConfig, PretrainedConfig
-from transformers.activations import ACT2FN
 from transformers.modeling_outputs import BaseModelOutputWithPastAndCrossAttentions
 from opentelemetry import trace
 from safetensors import safe_open
@@ -39,7 +37,6 @@ def __init__(
         classifier_dropout=None,
         feed_forward_type="original",
         emb_pooler=None,
-        attn_implementation=None,
         **kwargs,
     ):
         super().__init__(pad_token_id=pad_token_id, **kwargs)
@@ -244,9 +241,6 @@ def forward(
             self.layerNorm_bias,
             eps=self.config.layer_norm_eps,
         )
-        # hidden_states = F.linear(
-        #     hidden_states, self.layerNorm_weight, self.layerNorm_bias
-        # )
         return hidden_states

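Everything this commit deletes is dead code: two imports that are no longer referenced in the file (scaled_dot_product_attention and ACT2FN), a config keyword that was accepted but never read (attn_implementation), and a commented-out F.linear call in forward. Dropping the explicit keyword should not break existing callers, because the config's **kwargs still absorbs unknown keys and forwards them to PretrainedConfig, as the super().__init__ context line in the second hunk shows. A minimal sketch of that pattern, using a hypothetical stand-in class (the real config class in jinaBert_model.py has many more fields):

from transformers import PretrainedConfig

class JinaBertConfigSketch(PretrainedConfig):
    # Hypothetical stand-in mirroring only the kwargs-forwarding pattern
    # visible in the hunk above; not the full class from jinaBert_model.py.
    def __init__(
        self,
        pad_token_id=0,
        classifier_dropout=None,
        feed_forward_type="original",
        emb_pooler=None,
        **kwargs,  # attn_implementation, if a caller still passes it, lands here
    ):
        # PretrainedConfig accepts arbitrary keyword arguments, so the
        # removed explicit parameter is not needed for backward compatibility.
        super().__init__(pad_token_id=pad_token_id, **kwargs)
        self.classifier_dropout = classifier_dropout
        self.feed_forward_type = feed_forward_type
        self.emb_pooler = emb_pooler

# A caller that still passes the removed keyword keeps working:
cfg = JinaBertConfigSketch(attn_implementation="eager")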