Fix import
Browse files — modeling_eurobert.py (+9 −9)
modeling_eurobert.py
CHANGED
|
@@ -26,15 +26,15 @@ import torch
|
|
| 26 |
from torch import nn
|
| 27 |
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
| 28 |
|
| 29 |
| 29 | - from … [original import line truncated in page extraction; replaced below by an absolute `from transformers.…` import]
| 30 | - from … [truncated in extraction]
| 31 | - from … [truncated in extraction]
| 32 | - from … [truncated in extraction]
| 33 | - from … [truncated in extraction]
| 34 | - from … [truncated in extraction]
| 35 | - from … [truncated in extraction]
| 36 | - from … [truncated in extraction]
| 37 | - from … [truncated in extraction]
|
| 38 |
from .configuration_eurobert import EuroBertConfig
|
| 39 |
|
| 40 |
|
|
|
|
| 26 |
from torch import nn
|
| 27 |
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
|
| 28 |
|
| 29 |
+
from transformers.activations import ACT2FN
|
| 30 |
+
from transformers.cache_utils import Cache, StaticCache
|
| 31 |
+
from transformers.modeling_attn_mask_utils import AttentionMaskConverter
|
| 32 |
+
from transformers.modeling_flash_attention_utils import FlashAttentionKwargs
|
| 33 |
+
from transformers.modeling_outputs import BaseModelOutput, BaseModelOutputWithPast, MaskedLMOutput, SequenceClassifierOutput
|
| 34 |
+
from transformers.modeling_rope_utils import ROPE_INIT_FUNCTIONS
|
| 35 |
+
from transformers.modeling_utils import ALL_ATTENTION_FUNCTIONS, PreTrainedModel
|
| 36 |
+
from transformers.processing_utils import Unpack
|
| 37 |
+
from transformers.utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
|
| 38 |
from .configuration_eurobert import EuroBertConfig
|
| 39 |
|
| 40 |
|