Upload modeling_bert.py
modeling_bert.py (+6 -6)
@@ -27,12 +27,12 @@ from packaging import version
 from torch import nn
 from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
 
-from ...activations import ACT2FN
-from ...modeling_attn_mask_utils import (
+from transformers.activations import ACT2FN
+from transformers.modeling_attn_mask_utils import (
     _prepare_4d_attention_mask_for_sdpa,
     _prepare_4d_causal_attention_mask_for_sdpa,
 )
-from ...modeling_outputs import (
+from transformers.modeling_outputs import (
     BaseModelOutputWithPastAndCrossAttentions,
     BaseModelOutputWithPoolingAndCrossAttentions,
     CausalLMOutputWithCrossAttentions,
@@ -43,9 +43,9 @@ from ...modeling_outputs import (
     SequenceClassifierOutput,
     TokenClassifierOutput,
 )
-from ...modeling_utils import PreTrainedModel
-from ...pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer
-from ...utils import (
+from transformers.modeling_utils import PreTrainedModel
+from transformers.pytorch_utils import apply_chunking_to_forward, find_pruneable_heads_and_indices, prune_linear_layer
+from transformers.utils import (
     ModelOutput,
     add_code_sample_docstrings,
     add_start_docstrings,
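The six changed lines swap the package-relative imports (from ...activations import ACT2FN, and so on) for absolute transformers.* imports, which is what would let the uploaded file be imported as a standalone module rather than only from inside the transformers source tree. A minimal usage sketch under that assumption, and assuming any remaining relative imports elsewhere in the file (such as the BERT configuration import) are adjusted the same way:

# Minimal sketch (assumptions above): load the uploaded file as a local module
# instead of going through transformers.models.bert.modeling_bert.
from transformers import BertConfig    # provided by the installed package

from modeling_bert import BertModel    # the standalone file from this upload

config = BertConfig()                  # default BERT-base hyperparameters
model = BertModel(config)              # built from the local class definition
print(model.config.hidden_size)        # 768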