
Commit 2c59e39

sijunhewj-Mcat and 骑马小猫 authored
[PretrainedConfig] Bring back deprecation warning and fix redundant warnings (#4264)
* commits
* wip
* remove all hasattr
* codegen
* turn on test
* add standard config map
* standard config map
* Update paddlenlp/transformers/bert/configuration.py

Co-authored-by: 骑马小猫 <[email protected]>
1 parent 7b0c59d commit 2c59e39

19 files changed, +53 -78 lines changed

paddlenlp/transformers/bart/modeling.py

Lines changed: 3 additions & 7 deletions
@@ -23,8 +23,7 @@
 from paddle import Tensor
 from paddle.nn import Embedding, Layer, MultiHeadAttention
 
-from paddlenlp.utils.env import CONFIG_NAME
-
+from ...utils.env import CONFIG_NAME
 from ...utils.log import logger
 from .. import PretrainedModel, register_base_model
 from ..model_outputs import (

@@ -92,7 +91,7 @@ def init_weights(self, layer):
             layer.weight.set_value(
                 paddle.tensor.normal(
                     mean=0.0,
-                    std=self.init_std if hasattr(self, "init_std") else self.bart.config["init_std"],
+                    std=self.config.init_std,
                     shape=layer.weight.shape,
                 )
             )

@@ -1127,7 +1126,4 @@ def __getattr__(self, name):
         try:
             return super().__getattr__(name)
         except AttributeError:
-            try:
-                return getattr(getattr(self, self.base_model_prefix), name)
-            except AttributeError:
-                return getattr(getattr(self, self.base_model_prefix).config, name)
+            return getattr(getattr(self, self.base_model_prefix), name)
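Note: the removed inner try/except used to fall back to the base model's config for unknown attributes; now that every layer reaches its config directly via self.config, one level of delegation suffices. A minimal generic sketch of the remaining pattern (hypothetical classes for illustration, not PaddleNLP code):

    class Base:
        hidden_size = 768

    class Wrapper:
        base_model_prefix = "base"

        def __init__(self):
            self.base = Base()

        def __getattr__(self, name):
            # only invoked when normal attribute lookup fails
            try:
                return super().__getattr__(name)
            except AttributeError:
                # delegate to the wrapped base model, as in the diff above
                return getattr(getattr(self, self.base_model_prefix), name)

    w = Wrapper()
    print(w.hidden_size)  # 768, resolved through w.base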

paddlenlp/transformers/bert/configuration.py

Lines changed: 3 additions & 2 deletions
@@ -15,7 +15,8 @@
 from __future__ import annotations
 
 from typing import Dict
-from paddlenlp.transformers.configuration_utils import PretrainedConfig, attribute_map
+
+from paddlenlp.transformers.configuration_utils import PretrainedConfig
 
 __all__ = ["BERT_PRETRAINED_INIT_CONFIGURATION", "BertConfig", "BERT_PRETRAINED_RESOURCE_FILES_MAP"]
 

@@ -364,7 +365,7 @@ class BertConfig(PretrainedConfig):
     >>> configuration = model.config
     ```"""
     model_type = "bert"
-    attribute_map: Dict[str, str] = {"num_classes": "num_labels", "dropout": "classifier_dropout"}
+    standard_config_map: Dict[str, str] = {"dropout": "classifier_dropout"}
     pretrained_init_configuration = BERT_PRETRAINED_INIT_CONFIGURATION
 
     def __init__(
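Note: `dropout` is now redirected through standard_config_map instead of attribute_map, while the global `num_classes` -> `num_labels` alias is inherited from PretrainedConfig. A hedged usage sketch, assuming the redirection behaves as shown in the configuration_utils.py hunk below:

    from paddlenlp.transformers import BertConfig

    config = BertConfig()
    config.dropout = 0.2  # model-specific alias, stored as classifier_dropout
    assert config.classifier_dropout == 0.2
    assert config.num_classes == config.num_labels  # global alias still applies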

paddlenlp/transformers/bert/modeling.py

Lines changed: 3 additions & 4 deletions
@@ -30,6 +30,7 @@
 
 from paddlenlp.transformers.model_utils import PretrainedModel, register_base_model
 
+from ...utils.env import CONFIG_NAME
 from ..model_outputs import (
     BaseModelOutputWithPoolingAndCrossAttentions,
     MaskedLMOutput,

@@ -138,7 +139,7 @@ class BertPretrainedModel(PretrainedModel):
     See :class:`~paddlenlp.transformers.model_utils.PretrainedModel` for more details.
     """
 
-    model_config_file = "config.json"
+    model_config_file = CONFIG_NAME
     config_class = BertConfig
     resource_files_names = {"model_state": "model_state.pdparams"}
     base_model_prefix = "bert"

@@ -155,9 +156,7 @@ def init_weights(self, layer):
             layer.weight.set_value(
                 paddle.tensor.normal(
                     mean=0.0,
-                    std=self.initializer_range
-                    if hasattr(self, "initializer_range")
-                    else self.config.initializer_range,
+                    std=self.config.initializer_range,
                     shape=layer.weight.shape,
                 )
             )
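Note: the hasattr fallback chain is dropped because self.config is now the single source of truth inside init_weights; the same simplification recurs in the codegen, ernie, and ernie_layout hunks below. A small runnable sketch of the resulting pattern (requires paddle; the stand-in config class is hypothetical, with 0.02 being the BertConfig default):

    import paddle
    import paddle.nn as nn

    class ToyConfig:
        initializer_range = 0.02  # stand-in for a PretrainedConfig field

    class ToyModel:
        config = ToyConfig()

        def init_weights(self, layer):
            if isinstance(layer, (nn.Linear, nn.Embedding)):
                layer.weight.set_value(
                    paddle.tensor.normal(
                        mean=0.0,
                        std=self.config.initializer_range,  # no hasattr fallback
                        shape=layer.weight.shape,
                    )
                )

    ToyModel().init_weights(nn.Linear(4, 4))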

paddlenlp/transformers/codegen/modeling.py

Lines changed: 4 additions & 10 deletions
@@ -21,8 +21,7 @@
 from paddle import Tensor
 from paddle.nn import Layer
 
-from paddlenlp.utils.env import CONFIG_NAME
-
+from ...utils.env import CONFIG_NAME
 from ...utils.log import logger
 from .. import PretrainedModel, register_base_model
 from ..model_outputs import (

@@ -295,16 +294,14 @@ def init_weights(self, layer):
             layer.weight.set_value(
                 paddle.tensor.normal(
                     mean=0.0,
-                    std=self.initializer_range
-                    if hasattr(self, "initializer_range")
-                    else self.transformer.config.initializer_range,
+                    std=self.config.initializer_range,
                     shape=layer.weight.shape,
                 )
             )
         elif isinstance(layer, nn.LayerNorm):
             layer.bias.set_value(paddle.zeros_like(layer.bias))
             layer.weight.set_value(paddle.full_like(layer.weight, 1.0))
-            layer._epsilon = getattr(self, "layer_norm_epsilon", 1e-05)
+            layer._epsilon = self.config.layer_norm_epsilon
         if isinstance(layer, nn.Linear) and layer.bias is not None:
             layer.bias.set_value(paddle.zeros_like(layer.bias))
 

@@ -696,7 +693,4 @@ def __getattr__(self, name):
         try:
            return super().__getattr__(name)
         except AttributeError:
-            try:
-                return getattr(getattr(self, self.base_model_prefix), name)
-            except AttributeError:
-                return getattr(getattr(self, self.base_model_prefix).config, name)
+            return getattr(getattr(self, self.base_model_prefix), name)

paddlenlp/transformers/configuration_utils.py

Lines changed: 6 additions & 2 deletions
@@ -441,21 +441,25 @@ class PretrainedConfig:
     # global attribute mapping
     attribute_map: Dict[str, str] = {"num_classes": "num_labels"}
 
-    # map hf attribute to paddle attribute
-    # { "standard_field": "paddle_field", ... }
+    # model-specific attribute map from hf attribute to paddle attribute
+    # { "paddle_field": "standard_field", ... }
     standard_config_map: Dict[str, str] = {}
 
     _auto_class: Optional[str] = None
 
     def __setattr__(self, key, value):
         if key in super().__getattribute__("attribute_map"):
             key = super().__getattribute__("attribute_map")[key]
+        elif key in super().__getattribute__("standard_config_map"):
+            key = super().__getattribute__("standard_config_map")[key]
         super().__setattr__(key, value)
         assert hasattr(self, key)
 
     def __getattribute__(self, key):
         if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
             key = super().__getattribute__("attribute_map")[key]
+        elif key != "standard_config_map" and key in super().__getattribute__("standard_config_map"):
+            key = super().__getattribute__("standard_config_map")[key]
         return super().__getattribute__(key)
 
     def __getitem__(self, key):
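Note: this hunk is the core of the commit. attribute_map holds global aliases, standard_config_map holds per-model aliases, and both __setattr__ and __getattribute__ rewrite the key before touching real state, so aliased and canonical names stay in sync. A minimal standalone sketch of that behavior (simplified; the real PretrainedConfig carries much more machinery, including the restored deprecation warnings, whose text is not shown in this diff):

    from typing import Dict

    class TinyConfig:
        # global alias -> canonical field (as in PretrainedConfig)
        attribute_map: Dict[str, str] = {"num_classes": "num_labels"}
        # model-specific alias -> canonical field
        standard_config_map: Dict[str, str] = {"dropout": "classifier_dropout"}

        def __init__(self):
            self.num_labels = 2
            self.classifier_dropout = 0.1

        def __setattr__(self, key, value):
            if key in super().__getattribute__("attribute_map"):
                key = super().__getattribute__("attribute_map")[key]
            elif key in super().__getattribute__("standard_config_map"):
                key = super().__getattribute__("standard_config_map")[key]
            super().__setattr__(key, value)

        def __getattribute__(self, key):
            if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
                key = super().__getattribute__("attribute_map")[key]
            elif key != "standard_config_map" and key in super().__getattribute__("standard_config_map"):
                key = super().__getattribute__("standard_config_map")[key]
            return super().__getattribute__(key)

    config = TinyConfig()
    config.dropout = 0.3                 # stored as classifier_dropout
    assert config.classifier_dropout == 0.3
    assert config.num_classes == 2       # alias for num_labels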

paddlenlp/transformers/ernie/configuration.py

Lines changed: 1 addition & 1 deletion
@@ -1065,7 +1065,7 @@ class ErnieConfig(PretrainedConfig):
     >>> configuration = model.config
     ```"""
     model_type = "ernie"
-    attribute_map: Dict[str, str] = {"num_classes": "num_labels", "dropout": "classifier_dropout"}
+    standard_config_map: Dict[str, str] = {"dropout": "classifier_dropout"}
     pretrained_init_configuration = ERNIE_PRETRAINED_INIT_CONFIGURATION
 
     def __init__(

paddlenlp/transformers/ernie/modeling.py

Lines changed: 3 additions & 4 deletions
@@ -20,6 +20,7 @@
 import paddle.nn.functional as F
 from paddle import Tensor
 
+from ...utils.env import CONFIG_NAME
 from .. import PretrainedModel, register_base_model
 from ..model_outputs import (
     BaseModelOutputWithPoolingAndCrossAttentions,

@@ -148,7 +149,7 @@ class ErniePretrainedModel(PretrainedModel):
     """
 
-    model_config_file = "model_config.json"
+    model_config_file = CONFIG_NAME
     config_class = ErnieConfig
     resource_files_names = {"model_state": "model_state.pdparams"}
     base_model_prefix = "ernie"

@@ -165,9 +166,7 @@ def init_weights(self, layer):
             layer.weight.set_value(
                 paddle.tensor.normal(
                     mean=0.0,
-                    std=self.initializer_range
-                    if hasattr(self, "initializer_range")
-                    else self.ernie.config["initializer_range"],
+                    std=self.config.initializer_range,
                     shape=layer.weight.shape,
                 )
             )

paddlenlp/transformers/ernie_layout/configuration.py

Lines changed: 1 addition & 1 deletion
@@ -150,7 +150,7 @@ class ErnieLayoutConfig(PretrainedConfig):
     >>> configuration = model.config
     ```"""
     model_type = "ernie_layout"
-    attribute_map: Dict[str, str] = {"num_classes": "num_labels", "dropout": "classifier_dropout"}
+    standard_config_map: Dict[str, str] = {"dropout": "classifier_dropout"}
     pretrained_init_configuration = ERNIE_LAYOUT_PRETRAINED_INIT_CONFIGURATION
 
     def __init__(

paddlenlp/transformers/ernie_layout/modeling.py

Lines changed: 3 additions & 4 deletions
@@ -23,6 +23,7 @@
 
 from paddlenlp.utils.log import logger
 
+from ...utils.env import CONFIG_NAME
 from .. import PretrainedModel, register_base_model
 from .configuration import (
     ERNIE_LAYOUT_PRETRAINED_INIT_CONFIGURATION,

@@ -171,7 +172,7 @@ def forward(self, input_ids, bbox=None, token_type_ids=None, position_ids=None):
 
 
 class ErnieLayoutPretrainedModel(PretrainedModel):
-    model_config_file = "config.json"
+    model_config_file = CONFIG_NAME
     pretrained_init_configuration = ERNIE_LAYOUT_PRETRAINED_INIT_CONFIGURATION
     pretrained_resource_files_map = ERNIE_LAYOUT_PRETRAINED_RESOURCE_FILES_MAP
     base_model_prefix = "ernie_layout"

@@ -184,9 +185,7 @@ def init_weights(self, layer):
             layer.weight.set_value(
                 paddle.tensor.normal(
                     mean=0.0,
-                    std=self.pretrained_init_configuration["initializer_range"]
-                    if "initializer_range" in self.pretrained_init_configuration
-                    else 0.02,
+                    std=self.config.initializer_range,
                     shape=layer.weight.shape,
                 )
             )

paddlenlp/transformers/ernie_m/configuration.py

Lines changed: 1 addition & 1 deletion
@@ -144,7 +144,7 @@ class ErnieMConfig(PretrainedConfig):
     >>> configuration = model.config
     ```"""
     model_type = "ernie_m"
-    attribute_map: Dict[str, str] = {"num_classes": "num_labels", "dropout": "classifier_dropout"}
+    standard_config_map: Dict[str, str] = {"dropout": "classifier_dropout"}
    pretrained_init_configuration = ERNIE_M_PRETRAINED_INIT_CONFIGURATION
 
     def __init__(
