
Commit 05ceb78

Author: Sicara (committed)
lint : fix F541 f-string with no placeholder
1 parent 3ff0b08 commit 05ceb78

File tree

3 files changed: +6 additions, -6 deletions

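For context, F541 is the pyflakes/flake8 check for an f-string that contains no placeholders: since nothing is interpolated, the f prefix does nothing, and the fix is simply to drop it, as each hunk below does. A minimal sketch of the pattern (the epoch variable in the last line is illustrative only, not code from this repository):

    import logging

    logging.basicConfig(level=logging.INFO)

    # F541: f-string without any placeholders -- flagged by the linter
    logging.info(f"Start training...")

    # Fix: nothing is interpolated, so a plain string literal suffices
    logging.info("Start training...")

    # An f-string remains appropriate when a value is actually interpolated
    epoch = 3
    logging.info(f"Finished epoch {epoch}")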

snorkel/classification/training/loggers/checkpointer.py

Lines changed: 2 additions & 2 deletions
@@ -124,7 +124,7 @@ def checkpoint(
         elif not self.checkpoint_condition_met and iteration >= self.checkpoint_runway:
             self.checkpoint_condition_met = True
             logging.info(
-                f"checkpoint_runway condition has been met. Start checkpointing."
+                "checkpoint_runway condition has been met. Start checkpointing."
             )
 
         checkpoint_path = f"{self.checkpoint_dir}/checkpoint_{iteration}.pth"
@@ -187,7 +187,7 @@ def load_best_model(self, model: MultitaskClassifier) -> MultitaskClassifier:
         """Load the best model from the checkpoint."""
         metric = list(self.checkpoint_metric.keys())[0]
         if metric not in self.best_metric_dict:  # pragma: no cover
-            logging.info(f"No best model found, use the original model.")
+            logging.info("No best model found, use the original model.")
         else:
             # Load the best model of checkpoint_metric
             best_model_path = (

snorkel/classification/training/trainer.py

Lines changed: 2 additions & 2 deletions
@@ -179,7 +179,7 @@ def fit(
         # Set to training mode
         model.train()
 
-        logging.info(f"Start training...")
+        logging.info("Start training...")
 
         self.metrics: Dict[str, float] = dict()
         self._reset_losses()
@@ -372,7 +372,7 @@ def _set_warmup_scheduler(self) -> None:
         if self.config.lr_scheduler_config.warmup_steps:
             warmup_steps = self.config.lr_scheduler_config.warmup_steps
             if warmup_steps < 0:
-                raise ValueError(f"warmup_steps much greater or equal than 0.")
+                raise ValueError("warmup_steps much greater or equal than 0.")
             warmup_unit = self.config.lr_scheduler_config.warmup_unit
             if warmup_unit == "epochs":
                 self.warmup_steps = int(warmup_steps * self.n_batches_per_epoch)

snorkel/labeling/model/label_model.py

Lines changed: 2 additions & 2 deletions
@@ -577,7 +577,7 @@ def _set_class_balance(
     def _set_constants(self, L: np.ndarray) -> None:
         self.n, self.m = L.shape
         if self.m < 3:
-            raise ValueError(f"L_train should have at least 3 labeling functions")
+            raise ValueError("L_train should have at least 3 labeling functions")
         self.t = 1
 
     def _create_tree(self) -> None:
@@ -679,7 +679,7 @@ def _set_warmup_scheduler(self) -> None:
         if self.train_config.lr_scheduler_config.warmup_steps:
             warmup_steps = self.train_config.lr_scheduler_config.warmup_steps
             if warmup_steps < 0:
-                raise ValueError(f"warmup_steps much greater or equal than 0.")
+                raise ValueError("warmup_steps much greater or equal than 0.")
             warmup_unit = self.train_config.lr_scheduler_config.warmup_unit
             if warmup_unit == "epochs":
                 self.warmup_steps = int(warmup_steps)
