Skip to content

Commit acff977

Browse files
authored
Logger Cleanup (#9828)
1 parent e3ff780 commit acff977

File tree

3 files changed

+2
-16
lines changed

3 files changed

+2
-16
lines changed

segment/train.py

Lines changed: 0 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -91,17 +91,6 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
9191
data_dict = None
9292
if RANK in {-1, 0}:
9393
logger = GenericLogger(opt=opt, console_logger=LOGGER)
94-
# loggers = Loggers(save_dir, weights, opt, hyp, LOGGER) # loggers instance
95-
# if loggers.clearml:
96-
# data_dict = loggers.clearml.data_dict # None if no ClearML dataset or filled in by ClearML
97-
# if loggers.wandb:
98-
# data_dict = loggers.wandb.data_dict
99-
# if resume:
100-
# weights, epochs, hyp, batch_size = opt.weights, opt.epochs, opt.hyp, opt.batch_size
101-
#
102-
# # Register actions
103-
# for k in methods(loggers):
104-
# callbacks.register_action(k, callback=getattr(loggers, k))
10594

10695
# Config
10796
plots = not evolve and not opt.noplots # create plots
@@ -400,7 +389,6 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
400389
'ema': deepcopy(ema.ema).half(),
401390
'updates': ema.updates,
402391
'optimizer': optimizer.state_dict(),
403-
# 'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
404392
'opt': vars(opt),
405393
'date': datetime.now().isoformat()}
406394

train.py

Lines changed: 1 addition & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -53,7 +53,6 @@
5353
one_cycle, print_args, print_mutation, strip_optimizer, yaml_save)
5454
from utils.loggers import Loggers
5555
from utils.loggers.comet.comet_utils import check_comet_resume
56-
from utils.loggers.wandb.wandb_utils import check_wandb_resume
5756
from utils.loss import ComputeLoss
5857
from utils.metrics import fitness
5958
from utils.plots import plot_evolve
@@ -375,7 +374,6 @@ def train(hyp, opt, device, callbacks): # hyp is path/to/hyp.yaml or hyp dictio
375374
'ema': deepcopy(ema.ema).half(),
376375
'updates': ema.updates,
377376
'optimizer': optimizer.state_dict(),
378-
'wandb_id': loggers.wandb.wandb_run.id if loggers.wandb else None,
379377
'opt': vars(opt),
380378
'date': datetime.now().isoformat()}
381379

@@ -483,7 +481,7 @@ def main(opt, callbacks=Callbacks()):
483481
check_requirements()
484482

485483
# Resume (from specified or most recent last.pt)
486-
if opt.resume and not check_wandb_resume(opt) and not check_comet_resume(opt) and not opt.evolve:
484+
if opt.resume and not check_comet_resume(opt) and not opt.evolve:
487485
last = Path(check_file(opt.resume) if isinstance(opt.resume, str) else get_latest_run())
488486
opt_yaml = last.parent.parent / 'opt.yaml' # train options yaml
489487
opt_data = opt.data # original dataset

utils/general.py

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -956,7 +956,7 @@ def strip_optimizer(f='best.pt', s=''): # from utils.general import *; strip_op
956956
x = torch.load(f, map_location=torch.device('cpu'))
957957
if x.get('ema'):
958958
x['model'] = x['ema'] # replace model with ema
959-
for k in 'optimizer', 'best_fitness', 'wandb_id', 'ema', 'updates': # keys
959+
for k in 'optimizer', 'best_fitness', 'ema', 'updates': # keys
960960
x[k] = None
961961
x['epoch'] = -1
962962
x['model'].half() # to FP16

0 commit comments

Comments (0)