61 changes: 0 additions & 61 deletions ci/jenkins/Jenkinsfile_py2_cpu_unittest

This file was deleted.

61 changes: 0 additions & 61 deletions ci/jenkins/Jenkinsfile_py2_gpu_unittest

This file was deleted.

6 changes: 6 additions & 0 deletions scripts/bert/finetune_classifier.py
@@ -209,6 +209,12 @@
 if args.dtype == 'float16':
     try:
         from mxnet.contrib import amp  # pylint: disable=ungrouped-imports
+        # monkey patch amp list since topk does not support fp16
+        amp.lists.symbol.FP32_FUNCS.append('topk')
+        amp.lists.symbol.FP16_FP32_FUNCS.remove('topk')
+        amp.init()
+    except ValueError:
+        # topk is already in the FP32_FUNCS list
         amp.init()
     except ImportError:
         # amp is not available
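The ValueError branch covers MXNet builds where 'topk' is already absent from the FP16_FP32 list; a minimal, membership-checked sketch of the same patch (assuming the mxnet.contrib.amp list layout used in the diff above) avoids relying on the exception entirely:

    from mxnet.contrib import amp  # assumes an MXNet build that ships contrib AMP

    # Keep 'topk' on the FP32-only list so AMP never casts it to float16,
    # skipping each edit when the installed MXNet has already made it.
    if 'topk' not in amp.lists.symbol.FP32_FUNCS:
        amp.lists.symbol.FP32_FUNCS.append('topk')
    if 'topk' in amp.lists.symbol.FP16_FP32_FUNCS:
        amp.lists.symbol.FP16_FP32_FUNCS.remove('topk')
    amp.init()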
2 changes: 1 addition & 1 deletion scripts/tests/test_scripts.py
@@ -375,7 +375,7 @@ def test_finetune_inference(dataset):
 @pytest.mark.remote_required
 @pytest.mark.integration
 @pytest.mark.parametrize('dataset', ['WNLI'])
-@pytest.mark.parametrize('dtype', ['float32'])  # TODO fix 'float16'
+@pytest.mark.parametrize('dtype', ['float32', 'float16'])
 def test_finetune_train(dataset, dtype):
     arguments = ['--log_interval', '100', '--epsilon', '1e-8', '--optimizer',
                  'adam', '--gpu', '0', '--epochs', '2', '--dtype', dtype]
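With the TODO resolved, the WNLI/float16 combination is exercised by the integration tests. A rough sketch of an equivalent local run, assembled from the arguments shown in the test (the '--task_name' flag for passing the dataset is an assumption, it is not visible in this hunk):

    import subprocess
    import sys

    # Mirror the float16 case of test_finetune_train; flag names other than
    # --task_name are taken verbatim from the diff above.
    cmd = [sys.executable, 'scripts/bert/finetune_classifier.py',
           '--task_name', 'WNLI',  # assumed flag name for the dataset parameter
           '--log_interval', '100', '--epsilon', '1e-8', '--optimizer', 'adam',
           '--gpu', '0', '--epochs', '2', '--dtype', 'float16']
    subprocess.check_call(cmd)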