Skip to content

Commit f7c4213

Browse files
sanchit-gandhi and elusenji
authored and committed
Replace deprecated logger.warn with warning (huggingface#16876)
1 parent c3f1305 commit f7c4213

File tree

3 files changed

+3
-3
lines changed

3 files changed

+3
-3
lines changed

examples/research_projects/quantization-qdqbert/quant_trainer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -269,7 +269,7 @@ def set_quantizer(name, mod, quantizer, k, v):
269269
assert hasattr(quantizer_mod, k)
270270
setattr(quantizer_mod, k, v)
271271
else:
272-
logger.warn(f"{name} has no {quantizer}")
272+
logger.warning(f"{name} has no {quantizer}")
273273

274274

275275
def set_quantizers(name, mod, which="both", **kwargs):

src/transformers/configuration_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -306,7 +306,7 @@ def __init__(self, **kwargs):
306306
if self.id2label is not None:
307307
num_labels = kwargs.pop("num_labels", None)
308308
if num_labels is not None and len(self.id2label) != num_labels:
309-
logger.warn(
309+
logger.warning(
310310
f"You passed along `num_labels={num_labels}` with an incompatible id to label map: "
311311
f"{self.id2label}. The number of labels wil be overwritten to {self.num_labels}."
312312
)

src/transformers/modeling_flax_utils.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -641,7 +641,7 @@ def from_pretrained(
641641
unexpected_keys = set(state.keys()) - model.required_params
642642

643643
if missing_keys and not _do_init:
644-
logger.warn(
644+
logger.warning(
645645
f"The checkpoint {pretrained_model_name_or_path} is missing required keys: {missing_keys}. "
646646
f"Make sure to call model.init_weights to initialize the missing weights."
647647
)

0 commit comments

Comments (0)