
Commit aa4ee6a

[BLOCKING] Adding JVM doc build to Jenkins CI (dmlc#3567)
* Adding Java/Scala doc build to Jenkins CI
* Deploy built doc to S3 bucket
* Build doc only for branches
* Build doc first, to get doc faster for branch updates
* Have ReadTheDocs download doc tarball from S3
* Update JVM doc links
* Put doc build commands in a script
* Specify Spark 2.3+ requirement for XGBoost4J-Spark
* Build GPU wheel without NCCL, to reduce binary size
1 parent bad7604 commit aa4ee6a

10 files changed: +89 -9 lines changed

Jenkinsfile

Lines changed: 24 additions & 2 deletions
@@ -9,7 +9,7 @@ dockerRun = 'tests/ci_build/ci_build.sh'
 def buildMatrix = [
     [ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": true, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "9.2" ],
     [ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": true, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "8.0" ],
-    [ "enabled": false, "os" : "linux", "withGpu": false, "withNccl": false, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "" ],
+    [ "enabled": true, "os" : "linux", "withGpu": true, "withNccl": false, "withOmp": true, "pythonVersion": "2.7", "cudaVersion": "8.0" ],
 ]

 pipeline {
@@ -34,6 +34,28 @@ pipeline {
         milestone label: 'Sources ready', ordinal: 1
       }
     }
+    stage('Build doc') {
+      agent any
+      steps {
+        script {
+          if (env.CHANGE_ID == null) { // This is a branch
+            def commit_id = "${GIT_COMMIT}"
+            def branch_name = "${GIT_LOCAL_BRANCH}"
+            echo 'Building doc...'
+            dir ('jvm-packages') {
+              sh "bash ./build_doc.sh ${commit_id}"
+              archiveArtifacts artifacts: "${commit_id}.tar.bz2", allowEmptyArchive: true
+              echo 'Deploying doc...'
+              withAWS(credentials:'xgboost-doc-bucket') {
+                s3Upload file: "${commit_id}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${branch_name}.tar.bz2"
+              }
+            }
+          } else { // This is a pull request
+            echo 'Skipping doc build step for pull request'
+          }
+        }
+      }
+    }
     stage('Build & Test') {
       steps {
         script {
@@ -121,7 +143,7 @@ def cmakeOptions(conf) {
 }

 def getBuildName(conf) {
-  def gpuLabel = conf['withGpu'] ? "_cuda" + conf['cudaVersion'] : "_cpu"
+  def gpuLabel = conf['withGpu'] ? ("_cuda" + conf['cudaVersion'] + (conf['withNccl'] ? "_nccl" : "_nonccl")) : "_cpu"
   def ompLabel = conf['withOmp'] ? "_omp" : ""
   def pyLabel = "_py${conf['pythonVersion']}"
   return "${conf['os']}${gpuLabel}${ompLabel}${pyLabel}"

doc/conf.py

Lines changed: 8 additions & 0 deletions
@@ -12,11 +12,18 @@
 # All configuration values have a default; values that are commented out
 # serve to show the default.
 from subprocess import call
+from sh.contrib import git
+import urllib.request
 from recommonmark.parser import CommonMarkParser
 import sys
 import os, subprocess
 import shlex
 import guzzle_sphinx_theme
+
+git_branch = str(git('rev-parse', '--abbrev-ref', 'HEAD')).rstrip('\n')
+filename, _ = urllib.request.urlretrieve('https://s3-us-west-2.amazonaws.com/xgboost-docs/{}.tar.bz2'.format(git_branch))
+call('if [ -d tmp ]; then rm -rf tmp; fi; mkdir -p tmp/jvm; cd tmp/jvm; tar xvf {}'.format(filename), shell=True)
+
 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
@@ -94,6 +101,7 @@
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 exclude_patterns = ['_build']
+html_extra_path = ['./tmp']

 # The reST default role (used for this markup: `text`) to use for all
 # documents.
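
The added conf.py logic fetches the JVM doc tarball named after the current git branch and unpacks it under tmp/jvm, which html_extra_path then serves alongside the Sphinx output. A rough shell equivalent of that fetch-and-extract step is sketched below; curl and the jvm-doc.tar.bz2 file name are illustrative assumptions, since conf.py itself uses the sh package and urllib:

# Sketch only: reproduce the conf.py download/extract step from the doc/ directory
branch=$(git rev-parse --abbrev-ref HEAD)
curl -fLo jvm-doc.tar.bz2 "https://s3-us-west-2.amazonaws.com/xgboost-docs/${branch}.tar.bz2"
rm -rf tmp && mkdir -p tmp/jvm
tar xvf jvm-doc.tar.bz2 -C tmp/jvm    # contents are then published via html_extra_path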

doc/jvm/index.rst

Lines changed: 6 additions & 7 deletions
@@ -58,10 +58,9 @@ For sbt, please add the repository and dependency in build.sbt as following:

 If you want to use XGBoost4J-Spark, replace ``xgboost4j`` with ``xgboost4j-spark``.

-.. note:: Spark 2.0 Required
-
-  After integrating with Dataframe/Dataset APIs of Spark 2.0, XGBoost4J-Spark only supports compile with Spark 2.x. You can build XGBoost4J-Spark as a component of XGBoost4J by running ``mvn package``, and you can specify the version of spark with ``mvn -Dspark.version=2.0.0 package``. (To continue working with Spark 1.x, the users are supposed to update pom.xml by modifying the properties like ``spark.version``, ``scala.version``, and ``scala.binary.version``. Users also need to change the implementation by replacing ``SparkSession`` with ``SQLContext`` and the type of API parameters from ``Dataset[_]`` to ``Dataframe``)
+.. note:: XGBoost4J-Spark requires Spark 2.3+

+  XGBoost4J-Spark now requires Spark 2.3+. The latest version of XGBoost4J-Spark uses facilities of ``org.apache.spark.ml.param.shared`` extensively to provide tight integration with the Spark MLlib framework, and these facilities are not fully available in earlier versions of Spark.

 Installation from maven repo
 ============================
@@ -150,7 +149,7 @@ Contents
   java_intro
   XGBoost4J-Spark Tutorial <xgboost4j_spark_tutorial>
   Code Examples <https://github.com/dmlc/xgboost/tree/master/jvm-packages/xgboost4j-example>
-  XGBoost4J Java API <http://dmlc.ml/docs/javadocs/index.html>
-  XGBoost4J Scala API <http://dmlc.ml/docs/scaladocs/xgboost4j/index.html>
-  XGBoost4J-Spark Scala API <http://dmlc.ml/docs/scaladocs/xgboost4j-spark/index.html>
-  XGBoost4J-Flink Scala API <http://dmlc.ml/docs/scaladocs/xgboost4j-flink/index.html>
+  XGBoost4J Java API <javadocs/index>
+  XGBoost4J Scala API <scaladocs/xgboost4j/index>
+  XGBoost4J-Spark Scala API <scaladocs/xgboost4j-spark/index>
+  XGBoost4J-Flink Scala API <scaladocs/xgboost4j-flink/index>

doc/jvm/javadocs/index.rst

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+==================
+XGBoost4J Java API
+==================

doc/jvm/scaladocs/xgboost4j-flink/index.rst

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+=========================
+XGBoost4J-Flink Scala API
+=========================

doc/jvm/scaladocs/xgboost4j-spark/index.rst

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+=========================
+XGBoost4J-Spark Scala API
+=========================

doc/jvm/scaladocs/xgboost4j/index.rst

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+===================
+XGBoost4J Scala API
+===================

doc/jvm/xgboost4j_spark_tutorial.rst

Lines changed: 4 additions & 0 deletions
@@ -61,6 +61,10 @@ and then refer to the snapshot dependency by adding:
   <version>next_version_num-SNAPSHOT</version>
 </dependency>

+.. note:: XGBoost4J-Spark requires Spark 2.3+
+
+  XGBoost4J-Spark now requires Spark 2.3+. The latest version of XGBoost4J-Spark uses facilities of ``org.apache.spark.ml.param.shared`` extensively to provide tight integration with the Spark MLlib framework, and these facilities are not fully available in earlier versions of Spark.
+
 Data Preparation
 ================

doc/requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -2,3 +2,4 @@ sphinx
 mock
 guzzle_sphinx_theme
 breathe
+sh>=1.12.14

jvm-packages/build_doc.sh

Lines changed: 34 additions & 0 deletions
@@ -0,0 +1,34 @@
+#!/bin/bash
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 [commit id]"
+  exit 1
+fi
+
+set -e
+set -x
+
+commit_id=$1
+
+# Install JVM packages in local Maven repository
+mvn install -DskipTests
+# Build Scaladocs
+mvn scala:doc -DskipTests
+# Build Javadocs
+mvn javadoc:javadoc -DskipTests
+
+# Package JVM docs in a tarball
+mkdir -p tmp/scaladocs
+cp -rv xgboost4j/target/site/apidocs/ ./tmp/javadocs/
+cp -rv xgboost4j/target/site/scaladocs/ ./tmp/scaladocs/xgboost4j/
+cp -rv xgboost4j-spark/target/site/scaladocs/ ./tmp/scaladocs/xgboost4j-spark/
+cp -rv xgboost4j-flink/target/site/scaladocs/ ./tmp/scaladocs/xgboost4j-flink/
+
+cd tmp
+tar cvjf ${commit_id}.tar.bz2 javadocs/ scaladocs/
+mv ${commit_id}.tar.bz2 ..
+cd ..
+rm -rfv tmp/
+
+set +x
+set +e
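
The 'Build doc' stage in the Jenkinsfile above runs this script from the jvm-packages directory, passing the current commit hash. A minimal local invocation, assuming Maven and a JDK are available on the PATH, would look like:

cd jvm-packages
bash ./build_doc.sh "$(git rev-parse HEAD)"   # leaves <commit-sha>.tar.bz2 in jvm-packages/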
