We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 367558b · commit 9fd5e6b
src/transformers/deepspeed.py
@@ -250,7 +250,7 @@ def trainer_config_process(self, args):
250
self.fill_match("bf16.enabled", (args.bf16 or args.bf16_full_eval), "bf16|bf16_full_eval")
251
252
# deepspeed's default mode is fp16 unless there is a config that says differently
253
- if self.is_true("bfoat16.enabled"):
+ if self.is_true("bf16.enabled"):
254
self._dtype = torch.bfloat16
255
elif self.is_false("fp16.enabled"):
256
self._dtype = torch.float32
0 commit comments