
Commit b04ab25

activations
1 parent 3549460 commit b04ab25

2 files changed: +10 -20 lines changed


paddlenlp/transformers/convbert/modeling.py

Lines changed: 2 additions & 1 deletion
@@ -17,8 +17,9 @@
 import paddle.nn.functional as F
 from paddle import tensor
 from paddle.nn import Layer
-from ..electra.modeling import get_activation
+
 from .. import PretrainedModel, register_base_model
+from ..activations import get_activation
 
 __all__ = [
     "ConvBertModel",

paddlenlp/transformers/roformer/modeling.py

Lines changed: 8 additions & 19 deletions
@@ -14,23 +14,24 @@
 # limitations under the License.
 
 from typing import Optional, Tuple
-from paddle import Tensor
+
 import paddle
 import paddle.nn as nn
+from paddle import Tensor
+from paddle.common_ops_import import convert_dtype
 
 from .. import PretrainedModel, register_base_model
-from ..albert.modeling import get_activation
+from ..activations import get_activation
 from ..model_outputs import (
     BaseModelOutputWithPoolingAndCrossAttentions,
+    CausalLMOutputWithCrossAttentions,
+    MaskedLMOutput,
+    MultipleChoiceModelOutput,
+    QuestionAnsweringModelOutput,
     SequenceClassifierOutput,
     TokenClassifierOutput,
-    QuestionAnsweringModelOutput,
-    MultipleChoiceModelOutput,
-    MaskedLMOutput,
-    CausalLMOutputWithCrossAttentions,
     tuple_output,
 )
-from paddle.common_ops_import import convert_dtype
 
 __all__ = [
     "RoFormerModel",
@@ -712,18 +713,6 @@ def forward(
             attentions=encoder_outputs.attentions,
         )
 
-    def get_input_embeddings(self) -> nn.Embedding:
-        return self.embeddings.word_embeddings
-
-    def set_input_embeddings(self, embedding: nn.Embedding):
-        self.embeddings.word_embeddings = embedding
-
-    def get_input_embeddings(self) -> nn.Embedding:
-        return self.embeddings.word_embeddings
-
-    def set_input_embeddings(self, embedding: nn.Embedding):
-        self.embeddings.word_embeddings = embedding
-
 
 class RoFormerForQuestionAnswering(RoFormerPretrainedModel):
     r"""
