[CI] Add ARM64 test to Jenkins pipeline (#6643)
* Add ARM64 test to Jenkins pipeline
* Check for bundled libgomp
* Use a separate test suite for ARM64
* Ensure that x86 jobs don't run on ARM workers
Parent: 1b70a323a7
Commit: 55ee2bd77f
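The new `BuildCPUARM64()` stage below builds the ARM64 wheel inside an `aarch64` container. As a hedged sketch (not the committed CI script itself), the same steps reduce to roughly the following on an aarch64 host, assuming `auditwheel` and the `aarch64_test` conda environment from the diff are available:

    # Approximate local equivalent of the container steps in BuildCPUARM64()
    # (see the Jenkinsfile diff below); run from the repository root.
    set -euo pipefail
    tests/ci_build/build_via_cmake.sh --conda-env=aarch64_test -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOL=ON
    (cd build && ctest --extra-verbose)
    (cd python-package && rm -rf dist/* && python setup.py bdist_wheel --universal)
    # auditwheel retags the wheel as manylinux2014_aarch64 and vendors shared
    # libraries, including libgomp, into it
    auditwheel repair --plat manylinux2014_aarch64 python-package/dist/*.whl
    mv -v wheelhouse/*.whl python-package/dist/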
Jenkinsfile (vendored): 46 changes
@@ -56,6 +56,7 @@ pipeline {
         parallel ([
           'clang-tidy': { ClangTidy() },
           'build-cpu': { BuildCPU() },
+          'build-cpu-arm64': { BuildCPUARM64() },
           'build-cpu-rabit-mock': { BuildCPUMock() },
           // Build reference, distribution-ready Python wheel with CUDA 10.0
           // using CentOS 6 image
@@ -77,6 +78,7 @@ pipeline {
       script {
         parallel ([
           'test-python-cpu': { TestPythonCPU() },
+          'test-python-cpu-arm64': { TestPythonCPUARM64() },
           // artifact_cuda_version doesn't apply to RMM tests; RMM tests will always match CUDA version between artifact and host env
           'test-python-gpu-cuda10.2': { TestPythonGPU(artifact_cuda_version: '10.0', host_cuda_version: '10.2', test_rmm: true) },
           'test-python-gpu-cuda11.0-cross': { TestPythonGPU(artifact_cuda_version: '10.0', host_cuda_version: '11.0') },
@@ -164,6 +166,35 @@ def BuildCPU() {
   }
 }
 
+def BuildCPUARM64() {
+  node('linux && arm64') {
+    unstash name: 'srcs'
+    echo "Build CPU ARM64"
+    def container_type = "aarch64"
+    def docker_binary = "docker"
+    def wheel_tag = "manylinux2014_aarch64"
+    sh """
+    ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/build_via_cmake.sh --conda-env=aarch64_test -DOPEN_MP:BOOL=ON -DHIDE_CXX_SYMBOL=ON
+    ${dockerRun} ${container_type} ${docker_binary} bash -c "cd build && ctest --extra-verbose"
+    ${dockerRun} ${container_type} ${docker_binary} bash -c "cd python-package && rm -rf dist/* && python setup.py bdist_wheel --universal"
+    ${dockerRun} ${container_type} ${docker_binary} python tests/ci_build/rename_whl.py python-package/dist/*.whl ${commit_id} ${wheel_tag}
+    ${dockerRun} ${container_type} ${docker_binary} auditwheel repair --plat ${wheel_tag} python-package/dist/*.whl
+    mv -v wheelhouse/*.whl python-package/dist/
+    # Make sure that libgomp.so is vendored in the wheel
+    ${dockerRun} ${container_type} ${docker_binary} bash -c "unzip -l python-package/dist/*.whl | grep libgomp || exit -1"
+    """
+    echo 'Stashing Python wheel...'
+    stash name: "xgboost_whl_arm64_cpu", includes: 'python-package/dist/*.whl'
+    if (env.BRANCH_NAME == 'master' || env.BRANCH_NAME.startsWith('release')) {
+      echo 'Uploading Python wheel...'
+      path = ("${BRANCH_NAME}" == 'master') ? '' : "${BRANCH_NAME}/"
+      s3Upload bucket: 'xgboost-nightly-builds', path: path, acl: 'PublicRead', workingDir: 'python-package/dist', includePathPattern:'**/*.whl'
+    }
+    stash name: 'xgboost_cli', includes: 'xgboost'
+    deleteDir()
+  }
+}
+
 def BuildCPUMock() {
   node('linux && cpu') {
     unstash name: 'srcs'
@@ -304,6 +335,21 @@ def TestPythonCPU() {
   }
 }
 
+def TestPythonCPUARM64() {
+  node('linux && arm64') {
+    unstash name: "xgboost_whl_arm64_cpu"
+    unstash name: 'srcs'
+    unstash name: 'xgboost_cli'
+    echo "Test Python CPU ARM64"
+    def container_type = "aarch64"
+    def docker_binary = "docker"
+    sh """
+    ${dockerRun} ${container_type} ${docker_binary} tests/ci_build/test_python.sh cpu-arm64
+    """
+    deleteDir()
+  }
+}
+
 def TestPythonGPU(args) {
   def nodeReq = (args.multi_gpu) ? 'linux && mgpu' : 'linux && gpu'
   def artifact_cuda_version = (args.artifact_cuda_version) ?: ref_cuda_ver
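The libgomp check called out in the commit message guards against wheels that silently depend on a system-installed libgomp: the build fails unless auditwheel vendored the library into the repaired wheel. A minimal standalone version of the same check (the wheel filename here is illustrative):

    # Fail if libgomp was not vendored into the repaired ARM64 wheel; this
    # mirrors the grep in BuildCPUARM64() above. Wheel name is an example.
    unzip -l python-package/dist/xgboost-*-manylinux2014_aarch64.whl | grep libgomp || exit 1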
tests/ci_build/test_python.sh

@@ -66,8 +66,15 @@ case "$suite" in
     uninstall_xgboost
     ;;
 
+  cpu-arm64)
+    source activate aarch64_test
+    install_xgboost
+    pytest -v -s -rxXs --fulltrace --durations=0 ${args} tests/python/test_basic.py tests/python/test_basic_models.py tests/python/test_model_compatibility.py
+    uninstall_xgboost
+    ;;
+
   *)
-    echo "Usage: $0 {gpu|mgpu|cpu} [extra args to pass to pytest]"
+    echo "Usage: $0 {gpu|mgpu|cpu|cpu-arm64} [extra args to pass to pytest]"
     exit 1
     ;;
 esac
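Jenkins invokes the new suite through the container wrapper (`tests/ci_build/test_python.sh cpu-arm64` in `TestPythonCPUARM64()` above); on an ARM64 machine with the test environment already set up, that reduces to:

    # Run the ARM64 CPU test suite directly; the script activates the
    # aarch64_test conda env and installs/uninstalls the wheel around pytest.
    tests/ci_build/test_python.sh cpu-arm64

Unrecognized suite names now print the updated usage string, `Usage: $0 {gpu|mgpu|cpu|cpu-arm64} [extra args to pass to pytest]`.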