Further improvements and savings in Jenkins pipeline (#5904)
* Publish artifacts only on the master and release branches
* Build CUDA only for Compute Capability 7.5 when building PRs
* Run all Windows jobs in a single worker image
* Build nightly XGBoost4J SNAPSHOT JARs with Scala 2.12 only
* Show skipped Python tests on Windows
* Make Graphviz optional for Python tests
* Add back C++ tests
* Unstash xgboost_cpp_tests
* Fix label to CUDA 10.1
* Install cuPy for CUDA 10.1
* Install jsonschema
* Address reviewer's feedback
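Most of these bullets reduce to two recurring edits in the pipeline code, condensed here as a rough Groovy sketch. This block is illustrative only and is not part of the diff; the hunks below are the authoritative version, and the fragment assumes only Jenkins' env.BRANCH_NAME and the s3Upload step already used in this pipeline.

    // Publish artifacts (wheels, docs) only from the master and release branches.
    if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
        // s3Upload ...  (see the individual hunks for the exact upload calls)
    }

    // On PR builds, narrow the CUDA build to Compute Capability 7.5 to save time.
    def arch_flag = ""
    if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
        arch_flag = "-DGPU_COMPUTE_VER=75"
    }
    // arch_flag is then appended to the CMake invocation, e.g.
    // build_via_cmake.sh -DUSE_CUDA=ON ... ${arch_flag}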
Parent: 6c0c87216f
Commit: ac9136ee49

Jenkinsfile (vendored): 25 changes

@@ -173,8 +173,10 @@ def Doxygen() {
     sh """
     ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/doxygen.sh ${BRANCH_NAME}
     """
-    echo 'Uploading doc...'
-    s3Upload file: "build/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "doxygen/${BRANCH_NAME}.tar.bz2"
+    if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
+      echo 'Uploading doc...'
+      s3Upload file: "build/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "doxygen/${BRANCH_NAME}.tar.bz2"
+    }
     deleteDir()
   }
 }
@@ -245,8 +247,12 @@ def BuildCUDA(args) {
   def container_type = "gpu_build"
   def docker_binary = "docker"
   def docker_args = "--build-arg CUDA_VERSION=${args.cuda_version}"
+  def arch_flag = ""
+  if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
+    arch_flag = "-DGPU_COMPUTE_VER=75"
+  }
   sh """
-  ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_via_cmake.sh -DUSE_CUDA=ON -DUSE_NCCL=ON -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOLS=ON
+  ${dockerRun} ${container_type} ${docker_binary} ${docker_args} tests/ci_build/build_via_cmake.sh -DUSE_CUDA=ON -DUSE_NCCL=ON -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOLS=ON ${arch_flag}
   ${dockerRun} ${container_type} ${docker_binary} ${docker_args} bash -c "cd python-package && rm -rf dist/* && python setup.py bdist_wheel --universal"
   ${dockerRun} ${container_type} ${docker_binary} ${docker_args} python3 tests/ci_build/rename_whl.py python-package/dist/*.whl ${commit_id} manylinux2010_x86_64
   """
@@ -254,8 +260,11 @@ def BuildCUDA(args) {
   if (args.cuda_version == '10.0') {
     echo 'Stashing Python wheel...'
     stash name: 'xgboost_whl_cuda10', includes: 'python-package/dist/*.whl'
-    path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
-    s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
+    if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
+      echo 'Uploading Python wheel...'
+      path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
+      s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
+    }
     echo 'Stashing C++ test executable (testxgboost)...'
     stash name: 'xgboost_cpp_tests', includes: 'build/testxgboost'
   }
@@ -289,8 +298,10 @@ def BuildJVMDoc() {
     sh """
     ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_jvm_doc.sh ${BRANCH_NAME}
     """
-    echo 'Uploading doc...'
-    s3Upload file: "jvm-packages/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${BRANCH_NAME}.tar.bz2"
+    if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
+      echo 'Uploading doc...'
+      s3Upload file: "jvm-packages/${BRANCH_NAME}.tar.bz2", bucket: 'xgboost-docs', acl: 'PublicRead', path: "${BRANCH_NAME}.tar.bz2"
+    }
     deleteDir()
   }
 }

@@ -10,6 +10,15 @@ def commit_id // necessary to pass a variable from one stage to another
 pipeline {
   agent none
 
+  // Setup common job properties
+  options {
+    timestamps()
+    timeout(time: 240, unit: 'MINUTES')
+    buildDiscarder(logRotator(numToKeepStr: '10'))
+    preserveStashes()
+  }
+
   // Build stages
   stages {
     stage('Jenkins Win64: Initialize') {
@@ -29,7 +38,7 @@ pipeline {
       steps {
         script {
           parallel ([
-            'build-win64-cuda10.0': { BuildWin64() }
+            'build-win64-cuda10.1': { BuildWin64() }
           ])
         }
         milestone ordinal: 2
@@ -40,8 +49,7 @@ pipeline {
       steps {
         script {
           parallel ([
-            'test-win64-cpu': { TestWin64CPU() },
-            'test-win64-gpu-cuda10.1': { TestWin64GPU(cuda_target: 'cuda10_1') }
+            'test-win64-cuda10.1': { TestWin64() },
           ])
         }
         milestone ordinal: 3
@@ -66,14 +74,18 @@ def checkoutSrcs() {
 }
 
 def BuildWin64() {
-  node('win64 && build && cuda10') {
+  node('win64 && cuda10_unified') {
     unstash name: 'srcs'
     echo "Building XGBoost for Windows AMD64 target..."
     bat "nvcc --version"
+    def arch_flag = ""
+    if (env.BRANCH_NAME != 'master' && !(env.BRANCH_NAME.startsWith('release'))) {
+      arch_flag = "-DGPU_COMPUTE_VER=75"
+    }
     bat """
     mkdir build
     cd build
-    cmake .. -G"Visual Studio 15 2017 Win64" -DUSE_CUDA=ON -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON
+    cmake .. -G"Visual Studio 15 2017 Win64" -DUSE_CUDA=ON -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_DMLC_GTEST=ON ${arch_flag}
     """
     bat """
     cd build
@@ -91,8 +103,11 @@ def BuildWin64() {
     """
     echo 'Stashing Python wheel...'
     stash name: 'xgboost_whl', includes: 'python-package/dist/*.whl'
-    path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
-    s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
+    if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
+      echo 'Uploading Python wheel...'
+      path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
+      s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
+    }
     echo 'Stashing C++ test executable (testxgboost)...'
     stash name: 'xgboost_cpp_tests', includes: 'build/testxgboost.exe'
     stash name: 'xgboost_cli', includes: 'xgboost.exe'
@@ -100,52 +115,29 @@ def BuildWin64() {
   }
 }
 
-def TestWin64CPU() {
-  node('win64 && cpu') {
+def TestWin64() {
+  node('win64 && cuda10_unified') {
     unstash name: 'srcs'
     unstash name: 'xgboost_whl'
     unstash name: 'xgboost_cli'
-    echo "Test Win64 CPU"
-    echo "Installing Python wheel..."
-    bat "conda activate && (python -m pip uninstall -y xgboost || cd .)"
-    bat """
-    conda activate && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
-    """
-    echo "Installing Python dependencies..."
-    bat """
-    conda activate && conda install -y hypothesis && conda upgrade scikit-learn pandas numpy hypothesis
-    """
-    echo "Running Python tests..."
-    bat "conda activate && python -m pytest -v -s --fulltrace tests\\python"
-    bat "conda activate && python -m pip uninstall -y xgboost"
-    deleteDir()
-  }
-}
-
-def TestWin64GPU(args) {
-  node("win64 && gpu && ${args.cuda_target}") {
-    unstash name: 'srcs'
-    unstash name: 'xgboost_whl'
     unstash name: 'xgboost_cpp_tests'
-    echo "Test Win64 GPU (${args.cuda_target})"
+    echo "Test Win64"
     bat "nvcc --version"
     echo "Running C++ tests..."
     bat "build\\testxgboost.exe"
-    echo "Installing Python wheel..."
-    bat "conda activate && (python -m pip uninstall -y xgboost || cd .)"
-    bat """
-    conda activate && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
-    """
     echo "Installing Python dependencies..."
-    def cuda_short_ver = args.cuda_target.replaceAll('_', '')
+    def env_name = 'win64_' + UUID.randomUUID().toString().replaceAll('-', '')
+    bat "conda env create -n ${env_name} --file=tests/ci_build/win64_conda_env.yml"
+    echo "Installing Python wheel..."
     bat """
-    conda activate && conda install -y hypothesis && conda upgrade scikit-learn pandas numpy hypothesis && python -m pip install cupy-${cuda_short_ver}
+    conda activate ${env_name} && for /R %%i in (python-package\\dist\\*.whl) DO python -m pip install "%%i"
     """
     echo "Running Python tests..."
+    bat "conda activate ${env_name} && python -m pytest -v -s -rxXs --fulltrace tests\\python"
     bat """
-    conda activate && python -m pytest -v -s --fulltrace -m "(not slow) and (not mgpu)" tests\\python-gpu
+    conda activate ${env_name} && python -m pytest -v -s -rxXs --fulltrace -m "(not slow) and (not mgpu)" tests\\python-gpu
     """
-    bat "conda activate && python -m pip uninstall -y xgboost"
+    bat "conda env remove --name ${env_name}"
     deleteDir()
   }
 }

@@ -20,16 +20,5 @@ cd jvm-packages
 # Deploy to S3 bucket xgboost-maven-repo
 mvn --no-transfer-progress package deploy -P release-to-s3 -Dspark.version=${spark_version} -DskipTests
 
-# Compile XGBoost4J with Scala 2.11 too
-mvn clean
-# Rename artifactId of all XGBoost4J packages with suffix _2.11
-sed -i -e 's/<artifactId>xgboost\(.*\)_[0-9\.]\+/<artifactId>xgboost\1_2.11/' $(find . -name pom.xml)
-# Modify scala.version and scala.binary.version fields
-sed -i -e 's/<scala\.version>[0-9\.]\+/<scala.version>2.11.12/' $(find . -name pom.xml)
-sed -i -e 's/<scala\.binary\.version>[0-9\.]\+/<scala.binary.version>2.11/' $(find . -name pom.xml)
-
-# Re-build and deploy
-mvn --no-transfer-progress package deploy -P release-to-s3 -Dspark.version=${spark_version} -DskipTests
-
 set +x
 set +e

tests/ci_build/win64_conda_env.yml (new file): 18 lines

@@ -0,0 +1,18 @@
+name: win64_env
+channels:
+- conda-forge
+dependencies:
+- python=3.7
+- numpy
+- scipy
+- matplotlib
+- scikit-learn
+- pandas
+- pytest
+- python-graphviz
+- boto3
+- hypothesis
+- jsonschema
+- pip
+- pip:
+  - cupy-cuda101

@@ -1,10 +1,13 @@
 import os
 import subprocess
 import sys
+import pytest
 sys.path.append("tests/python")
+import testing as tm
 import test_demos as td  # noqa
 
 
+@pytest.mark.skipif(**tm.no_cupy())
 def test_data_iterator():
     script = os.path.join(td.PYTHON_DEMO_DIR, 'data_iterator.py')
     cmd = ['python', script]
@@ -15,7 +15,7 @@ except ImportError:
     pass
 
 
-pytestmark = pytest.mark.skipif(**tm.no_matplotlib())
+pytestmark = pytest.mark.skipif(**tm.no_multiple(tm.no_matplotlib(), tm.no_graphviz()))
 
 
 dpath = 'demo/data/'
|||||||
@ -437,6 +437,7 @@ def test_sklearn_api_gblinear():
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(**tm.no_matplotlib())
|
@pytest.mark.skipif(**tm.no_matplotlib())
|
||||||
|
@pytest.mark.skipif(**tm.no_graphviz())
|
||||||
def test_sklearn_plotting():
|
def test_sklearn_plotting():
|
||||||
from sklearn.datasets import load_iris
|
from sklearn.datasets import load_iris
|
||||||
|
|
||||||
|
|||||||
@@ -98,6 +98,26 @@ def no_json_schema():
     return {'condition': True, 'reason': reason}
 
 
+def no_graphviz():
+    reason = 'graphviz is not installed'
+    try:
+        import graphviz  # noqa
+        return {'condition': False, 'reason': reason}
+    except ImportError:
+        return {'condition': True, 'reason': reason}
+
+
+def no_multiple(*args):
+    condition = False
+    reason = ''
+    for arg in args:
+        condition = (condition or arg['condition'])
+        if arg['condition']:
+            reason = arg['reason']
+            break
+    return {'condition': condition, 'reason': reason}
+
+
 # Contains a dataset in numpy format as well as the relevant objective and metric
 class TestDataset:
     def __init__(self, name, get_dataset, objective, metric