@@ -35,11 +35,84 @@ utils.main_wrapper(
core_logic: {
  utils.parallel_stage('Doc Test', [
    build_steps.test_doctest('gluon-nlp-cpu-py3', 'cpu/py3',
-                             'src/gluonnlp', 'src/gluonnlp', 4)
+                            'src/gluonnlp', 'src/gluonnlp', 4)
  ])

+  // Compile example notebooks, Doctest & Create Website
+  node {  // Single node parallelism
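+    // The notebook jobs below run remotely on AWS Batch, so one Jenkins
+    // node is enough to drive all of them in parallel.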
+    ws('gluon-nlp-cpu-py3') {
+      stage("Prepare conda environment for website") {
+        utils.init_git()
+        // Require a full environment here due to the sphinx build step
+        // after compiling and downloading the notebooks
+        sh 'source ci/prepare_clean_env.sh cpu/py3'
+      }
+
+      stage("Create Website") {
+        def tests = [:]
+        for (f in findFiles(glob: '**/docs/examples/*/*.md')) {
+          def md_file = f.toString()  // Convert FileWrapper to String
+          def short_name = md_file["docs/examples/".length()..-1]
+          tests[short_name] = { ->
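+            // [0..-4] drops the trailing '.md'; the Batch job writes the
+            // converted notebook and its logs under these derived names.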
+            def base_name = md_file[0..-4] + ''
+            def ipynb_file = base_name + '.ipynb'
+            def stdout_file = base_name + '.stdout.log'
+            def stderr_file = base_name + '.stderr.log'
+            stage(short_name) {  // remove common path from name
+              // Submit an AWS Batch job for each example notebook.
+              // The converted notebooks and the conversion logs are
+              // saved to S3 and retrieved on the CI server once the jobs
+              // finish.
+              sh """
+              conda activate ./conda/cpu/py3
+
+              python3 ci/batch/submit-job.py --region us-east-1 --wait \
+                  --timeout 1800 --saved-output ./docs/examples --conda-env docker/py3 \
+                  --name GluonNLP-${env.BRANCH_NAME}-${env.BUILD_NUMBER} \
+                  --save-path batch/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/docs/examples \
+                  --work-dir . --source-ref refs/pull/${env.CHANGE_ID}/head \
+                  --command \"python3 docs/md2ipynb.py ${md_file} > ${stdout_file} 2> ${stderr_file}\"
+              BATCH_EXIT_CODE=\$?
+
+              aws s3api wait object-exists --bucket gluon-nlp-staging \
+                  --key batch/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/${stderr_file}
+              aws s3 cp s3://gluon-nlp-staging/batch/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/${stderr_file} ${stderr_file}
+              cat ${stderr_file}
+
+              aws s3api wait object-exists --bucket gluon-nlp-staging \
+                  --key batch/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/${stdout_file}
+              aws s3 cp s3://gluon-nlp-staging/batch/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/${stdout_file} ${stdout_file}
+              cat ${stdout_file}
+
+              aws s3 cp s3://gluon-nlp-staging/batch/${env.BRANCH_NAME}/${env.BUILD_NUMBER}/${ipynb_file} ${ipynb_file}
+
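+              # Re-raise the job's exit status only after the logs and the
+              # converted notebook have been copied back for inspection.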
+              exit \$BATCH_EXIT_CODE
+              """
+            }
+          }
+        }
+
+        parallel tests
+      }
+
+      stage("Upload Website") {
+        if (env.BRANCH_NAME.startsWith('PR-')) {
+          bucket = 'gluon-nlp-staging'
+          path = env.BRANCH_NAME + '/' + env.BUILD_NUMBER
+        } else {
+          bucket = 'gluon-nlp'
+          path = env.BRANCH_NAME
+        }
+        sh """
+        conda activate ./conda/cpu/py3
+        make docs
+        ci/upload_doc.sh ${bucket} ${path}
+        """
+      }
+    }
+  }
+
  utils.parallel_stage('Documentation', [
-    build_steps.create_website('gluon-nlp-gpu-py3', 'gpu/py3'),
    build_steps.website_linkcheck('gluon-nlp-cpu-py3', 'cpu/py3')
  ])