Skip to content
This repository was archived by the owner on Jan 15, 2024. It is now read-only.

Commit 6c6ad76

Browse files
authored
skip failing tests in mxnet master (#685)
1 parent 4b83eb6 commit 6c6ad76

File tree

5 files changed

+59
-40
lines changed

5 files changed

+59
-40
lines changed

Jenkinsfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ stage("Unit Test") {
3535
withCredentials([string(credentialsId: 'GluonNLPCodeCov', variable: 'CODECOV_TOKEN')]) {
3636
ws('workspace/gluon-nlp-py3-master') {
3737
checkout scm
38-
sh("ci/step_unit_test.sh py3-master ${cov_flag} ${capture_flag}")
38+
sh("ci/step_unit_test_master.sh py3-master ${cov_flag} ${capture_flag}")
3939
}
4040
}
4141
}

ci/step_unit_test_master.sh

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
#!/bin/bash
# Run the GluonNLP test suites against mxnet master and upload coverage.
# Mirrors ci/step_unit_test.sh, but additionally excludes tests marked
# `skip_master` (temporarily broken on mxnet nightly builds).
#
# Usage: ci/step_unit_test_master.sh <lang> <branch> [capture_flag]
lang=$1
branch=$2
capture_flag=$3

set -ex

export LD_LIBRARY_PATH=/usr/local/cuda/lib64
source ci/prepare_clean_env.sh ${lang}
ci/install_dep.sh
# Pin each Jenkins executor to its own GPU.
export CUDA_VISIBLE_DEVICES=$EXECUTOR_NUMBER

# run_suite <test-target> <cov-target> <codecov-name>
# Runs the parallel (non-serial) half of one suite, then the serial half,
# uploading a coverage report after each. Tests marked `skip_master` are
# always excluded.
run_suite() {
    local target=$1
    local cov=$2
    local name=$3
    pytest -v ${capture_flag} -n 4 -m "not (serial or skip_master)" --durations=50 ${target} --cov ${cov}
    coverage xml
    ci/codecov.sh -c -F ${branch},${lang},notserial -n ${name} -f coverage.xml
    pytest -v ${capture_flag} -n 0 -m "serial and (not skip_master)" --durations=50 ${target} --cov ${cov}
    coverage xml
    ci/codecov.sh -c -F ${branch},${lang},serial -n ${name} -f coverage.xml
}

run_suite tests/unittest gluonnlp unittests
run_suite scripts scripts integration

set +ex

pytest.ini

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,4 @@ markers =
33
serial: mark a test that requires more resources to run that are thus only suitable for serial run.
44
remote_required: mark a test that requires internet access.
55
gpu: mark a test that requires GPU.
6+
skip_master: mark a test that is temporarily skipped for mxnet master validation.

scripts/tests/test_scripts.py

Lines changed: 33 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -60,6 +60,7 @@ def test_glove():
6060
time.sleep(5)
6161

6262

63+
@pytest.mark.skip_master
6364
@pytest.mark.serial
6465
@pytest.mark.remote_required
6566
@pytest.mark.gpu
@@ -102,18 +103,15 @@ def test_embedding_evaluate_from_path(evaluateanalogies, maxvocabsize):
102103
@pytest.mark.serial
103104
@pytest.mark.remote_required
104105
@pytest.mark.gpu
105-
def test_sentiment_analysis_finetune():
106-
process = subprocess.check_call(['python', './scripts/sentiment_analysis/finetune_lm.py',
107-
'--gpu', '0', '--batch_size', '32', '--bucket_type', 'fixed',
108-
'--epochs', '1', '--dropout', '0', '--no_pretrained',
109-
'--lr', '0.005', '--valid_ratio', '0.1',
110-
'--save-prefix', 'imdb_lstm_200'])
111-
time.sleep(5)
112-
process = subprocess.check_call(['python', './scripts/sentiment_analysis/finetune_lm.py',
113-
'--gpu', '0', '--batch_size', '32', '--bucket_type', 'fixed',
114-
'--epochs', '1', '--dropout', '0',
115-
'--lr', '0.005', '--valid_ratio', '0.1',
116-
'--save-prefix', 'imdb_lstm_200'])
106+
@pytest.mark.parametrize('use_pretrained', [True, False])
107+
def test_sentiment_analysis_finetune(use_pretrained):
108+
args = ['--gpu', '0', '--batch_size', '32', '--bucket_type', 'fixed',
109+
'--epochs', '1', '--dropout', '0',
110+
'--lr', '0.005', '--valid_ratio', '0.1',
111+
'--save-prefix', 'imdb_lstm_200']
112+
if not use_pretrained:
113+
args.append('--no_pretrained')
114+
process = subprocess.check_call(['python', './scripts/sentiment_analysis/finetune_lm.py']+args)
117115
time.sleep(5)
118116

119117
@pytest.mark.serial
@@ -126,18 +124,21 @@ def test_sentiment_analysis_textcnn():
126124
'--data_name', 'MR', '--save-prefix', 'sa-model'])
127125
time.sleep(5)
128126

127+
@pytest.mark.skip_master
129128
@pytest.mark.remote_required
130-
def test_sampling():
131-
process = subprocess.check_call(['python', './scripts/text_generation/sequence_sampling.py',
132-
'--use-beam-search', '--bos', 'I love it', '--beam_size', '2',
133-
'--print_num', '1'])
134-
time.sleep(5)
135-
process = subprocess.check_call(['python', './scripts/text_generation/sequence_sampling.py',
136-
'--use-sampling', '--bos', 'I love it', '--beam_size', '2',
137-
'--print_num', '1', '--temperature', '1.0'])
129+
@pytest.mark.parametrize('method', ['beam_search', 'sampling'])
130+
def test_sampling(method):
131+
args = ['--bos', 'I love it', '--beam_size', '2', '--print_num', '1']
132+
if method == 'beam_search':
133+
args.append('--use-beam-search')
134+
if method == 'sampling':
135+
args.extend(['--use-sampling', '--temperature', '1.0'])
136+
process = subprocess.check_call(['python', './scripts/text_generation/sequence_sampling.py']
137+
+ args)
138138
time.sleep(5)
139139

140140

141+
@pytest.mark.skip_master
141142
@pytest.mark.serial
142143
@pytest.mark.remote_required
143144
@pytest.mark.gpu
@@ -153,23 +154,16 @@ def test_gnmt():
153154
@pytest.mark.serial
154155
@pytest.mark.remote_required
155156
@pytest.mark.gpu
156-
def test_transformer():
157-
process = subprocess.check_call(['python', './scripts/machine_translation/train_transformer.py',
158-
'--dataset', 'TOY', '--src_lang', 'en', '--tgt_lang', 'de',
159-
'--batch_size', '32', '--optimizer', 'adam',
160-
'--num_accumulated', '1', '--lr', '1.0',
161-
'--warmup_steps', '2000', '--save_dir', 'test',
162-
'--epochs', '1', '--gpus', '0', '--scaled', '--average_start',
163-
'1', '--num_buckets', '5', '--bleu', 'tweaked', '--num_units',
164-
'32', '--hidden_size', '64', '--num_layers', '2',
165-
'--num_heads', '4', '--test_batch_size', '32'])
166-
process = subprocess.check_call(['python', './scripts/machine_translation/train_transformer.py',
167-
'--dataset', 'TOY', '--src_lang', 'en', '--tgt_lang', 'de',
168-
'--batch_size', '32', '--optimizer', 'adam',
169-
'--num_accumulated', '1', '--lr', '1.0',
170-
'--warmup_steps', '2000', '--save_dir', 'test',
171-
'--epochs', '1', '--gpus', '0', '--scaled', '--average_start',
172-
'1', '--num_buckets', '5', '--bleu', '13a', '--num_units',
173-
'32', '--hidden_size', '64', '--num_layers', '2',
174-
'--num_heads', '4', '--test_batch_size', '32'])
157+
@pytest.mark.parametrize('bleu', ['tweaked', '13a'])
158+
def test_transformer(bleu):
159+
args = ['--dataset', 'TOY', '--src_lang', 'en', '--tgt_lang', 'de',
160+
'--batch_size', '32', '--optimizer', 'adam',
161+
'--num_accumulated', '1', '--lr', '1.0',
162+
'--warmup_steps', '2000', '--save_dir', 'test',
163+
'--epochs', '1', '--gpus', '0', '--scaled', '--average_start',
164+
'1', '--num_buckets', '5', '--bleu', bleu, '--num_units',
165+
'32', '--hidden_size', '64', '--num_layers', '2',
166+
'--num_heads', '4', '--test_batch_size', '32']
167+
process = subprocess.check_call(['python', './scripts/machine_translation/train_transformer.py']
168+
+args)
175169
time.sleep(5)

tests/unittest/test_sequence_sampler.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,8 @@ def test_beam_search_score(length, alpha, K):
2727
lp = (K + length) ** alpha / (K + 1) ** alpha
2828
assert_allclose(scores.asnumpy(), sum_log_probs.asnumpy() / lp, 1E-5, 1E-5)
2929

30+
@pytest.mark.skip_master
31+
@pytest.mark.serial
3032
def test_sequence_sampler():
3133
vocab_size = np.random.randint(5, 20)
3234
batch_size = 1000

0 commit comments

Comments (0)