Swap byte-order in binary serializer to support big-endian arch (#5813)
* fixed some endian issues * Use dmlc::ByteSwap() to simplify code * Fix lint check * [CI] Add test for s390x * Download latest CMake on s390x * Fix a bug in my code * Save magic number in dmatrix with byteswap on big-endian machine * Save version in binary with byteswap on big-endian machine * Load scalar with byteswap in MetaInfo * Add a debugging message * Handle arrays correctly when byteswapping * EOF can also be 255 * Handle magic number in MetaInfo carefully * Skip Tree.Load test for big-endian, since the test manually builds little-endian binary model * Handle missing packages in Python tests * Don't use boto3 in model compatibility tests * Add s390 Docker file for local testing * Add model compatibility tests * Add R compatibility test * Revert "Add R compatibility test" This reverts commit c2d2bdcb7dbae133cbb927fcd20f7e83ee2b18a8. Co-authored-by: Qi Zhang <q.zhang@ibm.com> Co-authored-by: Hyunsu Cho <chohyu01@cs.washington.edu>
This commit is contained in:

tests/ci_build/Dockerfile.s390x — new file, 27 lines added

@@ -0,0 +1,27 @@
FROM s390x/ubuntu:20.04

# Environment
ENV DEBIAN_FRONTEND noninteractive
SHELL ["/bin/bash", "-c"]   # Use Bash as shell

# Install all basic requirements
RUN \
    apt-get update && \
    apt-get install -y --no-install-recommends tar unzip wget git build-essential ninja-build \
        cmake time python3 python3-pip python3-numpy python3-scipy python3-sklearn r-base && \
    python3 -m pip install pytest hypothesis

ENV GOSU_VERSION 1.10

# Install lightweight sudo (not bound to TTY)
# NOTE(review): this image is s390x (see FROM above), so fetch the s390x gosu binary.
# The original fetched gosu-amd64, an x86-64 ELF that cannot execute on s390x and
# would fail the `gosu nobody true` self-check below, breaking the image build.
RUN set -ex; \
    wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-s390x" && \
    chmod +x /usr/local/bin/gosu && \
    gosu nobody true

# Default entry-point to use if running locally
# It will preserve attributes of created files
COPY entrypoint.sh /scripts/

WORKDIR /workspace
ENTRYPOINT ["/scripts/entrypoint.sh"]
|
||||
@@ -453,7 +453,8 @@ TEST(Json, Invalid) {
|
||||
Json load{Json::Load(StringView(str.c_str(), str.size()))};
|
||||
} catch (dmlc::Error const &e) {
|
||||
std::string msg = e.what();
|
||||
ASSERT_NE(msg.find("EOF"), std::string::npos);
|
||||
ASSERT_TRUE(msg.find("EOF") != std::string::npos
|
||||
|| msg.find("255") != std::string::npos); // EOF is printed as 255 on s390x
|
||||
has_thrown = true;
|
||||
};
|
||||
ASSERT_TRUE(has_thrown);
|
||||
|
||||
@@ -6,6 +6,7 @@
|
||||
#include "xgboost/json_io.h"
|
||||
|
||||
namespace xgboost {
|
||||
#if DMLC_IO_NO_ENDIAN_SWAP // skip on big-endian machines
|
||||
// Manually construct tree in binary format
|
||||
// Do not use structs in case they change
|
||||
// We want to preserve backwards compatibility
|
||||
@@ -85,6 +86,7 @@ TEST(Tree, Load) {
|
||||
EXPECT_EQ(tree[1].LeafValue(), 0.1f);
|
||||
EXPECT_TRUE(tree[1].IsLeaf());
|
||||
}
|
||||
#endif // DMLC_IO_NO_ENDIAN_SWAP
|
||||
|
||||
TEST(Tree, AllocateNode) {
|
||||
RegTree tree;
|
||||
|
||||
@@ -109,6 +109,8 @@ def test_evals_result_demo():
|
||||
subprocess.check_call(cmd)
|
||||
|
||||
|
||||
@pytest.mark.skipif(**tm.no_sklearn())
|
||||
@pytest.mark.skipif(**tm.no_pandas())
|
||||
def test_aft_demo():
|
||||
script = os.path.join(DEMO_DIR, 'aft_survival', 'aft_survival_demo.py')
|
||||
cmd = ['python', script]
|
||||
|
||||
@@ -82,6 +82,7 @@ class TestEarlyStopping(unittest.TestCase):
|
||||
self.assert_metrics_length(cv, 1)
|
||||
|
||||
@pytest.mark.skipif(**tm.no_sklearn())
|
||||
@pytest.mark.skipif(**tm.no_pandas())
|
||||
def test_cv_early_stopping_with_multiple_eval_sets_and_metrics(self):
|
||||
from sklearn.datasets import load_breast_cancer
|
||||
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import xgboost
|
||||
import os
|
||||
import generate_models as gm
|
||||
import testing as tm
|
||||
import json
|
||||
import zipfile
|
||||
import pytest
|
||||
import copy
|
||||
import urllib.request
|
||||
|
||||
|
||||
def run_model_param_check(config):
|
||||
@@ -87,6 +89,7 @@ def run_scikit_model_check(name, path):
|
||||
assert False
|
||||
|
||||
|
||||
@pytest.mark.skipif(**tm.no_sklearn())
|
||||
def test_model_compatibility():
|
||||
'''Test model compatibility, can only be run on CI as others don't
|
||||
have the credentials.
|
||||
@@ -94,17 +97,9 @@ def test_model_compatibility():
|
||||
'''
|
||||
path = os.path.dirname(os.path.abspath(__file__))
|
||||
path = os.path.join(path, 'models')
|
||||
try:
|
||||
import boto3
|
||||
import botocore
|
||||
except ImportError:
|
||||
pytest.skip(
|
||||
'Skiping compatibility tests as boto3 is not installed.')
|
||||
|
||||
s3_bucket = boto3.resource('s3').Bucket('xgboost-ci-jenkins-artifacts')
|
||||
zip_path = 'xgboost_model_compatibility_test.zip'
|
||||
s3_bucket.download_file(zip_path, zip_path)
|
||||
|
||||
zip_path, _ = urllib.request.urlretrieve('https://xgboost-ci-jenkins-artifacts.s3-us-west-2' +
|
||||
'.amazonaws.com/xgboost_model_compatibility_test.zip')
|
||||
with zipfile.ZipFile(zip_path, 'r') as z:
|
||||
z.extractall(path)
|
||||
|
||||
|
||||
@@ -2,13 +2,17 @@
|
||||
import os
|
||||
from xgboost.compat import SKLEARN_INSTALLED, PANDAS_INSTALLED
|
||||
from xgboost.compat import DASK_INSTALLED
|
||||
import pytest
|
||||
import tempfile
|
||||
import xgboost as xgb
|
||||
import numpy as np
|
||||
|
||||
hypothesis = pytest.importorskip('hypothesis')
|
||||
sklearn = pytest.importorskip('sklearn')
|
||||
from hypothesis import strategies
|
||||
from hypothesis.extra.numpy import arrays
|
||||
from joblib import Memory
|
||||
from sklearn import datasets
|
||||
import tempfile
|
||||
import xgboost as xgb
|
||||
import numpy as np
|
||||
|
||||
try:
|
||||
import cupy as cp
|
||||
|
||||
@@ -88,3 +88,19 @@ if [ ${TASK} == "cmake_test" ]; then
|
||||
cd ..
|
||||
rm -rf build
|
||||
fi
|
||||
|
||||
if [ ${TASK} == "s390x_test" ]; then
|
||||
set -e
|
||||
|
||||
# Build and run C++ tests
|
||||
rm -rf build
|
||||
mkdir build && cd build
|
||||
cmake .. -DCMAKE_VERBOSE_MAKEFILE=ON -DGOOGLE_TEST=ON -DUSE_OPENMP=ON -DUSE_DMLC_GTEST=ON -GNinja
|
||||
time ninja -v
|
||||
./testxgboost
|
||||
|
||||
# Run model compatibility tests
|
||||
cd ..
|
||||
python3 -m pip install --user pytest hypothesis
|
||||
PYTHONPATH=./python-package python3 -m pytest --fulltrace -v -rxXs tests/python/ -k 'test_model'
|
||||
fi
|
||||
|
||||
@@ -20,6 +20,15 @@ if [ ${TASK} == "cmake_test" ] && [ ${TRAVIS_OS_NAME} == "osx" ]; then
|
||||
sudo softwareupdate -i "Command Line Tools (macOS High Sierra version 10.13) for Xcode-9.3"
|
||||
fi
|
||||
|
||||
if [ ${TASK} == "s390x_test" ] && [ ${TRAVIS_CPU_ARCH} == "s390x" ]; then
|
||||
sudo snap install cmake --channel=3.17/beta --classic
|
||||
export PATH=/snap/bin:${PATH}
|
||||
cmake --version
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends tar unzip wget git build-essential ninja-build \
|
||||
time python3 python3-pip python3-numpy python3-scipy python3-sklearn r-base
|
||||
fi
|
||||
|
||||
if [ ${TASK} == "python_sdist_test" ] && [ ${TRAVIS_OS_NAME} == "linux" ]; then
|
||||
wget https://github.com/Kitware/CMake/releases/download/v3.17.1/cmake-3.17.1-Linux-x86_64.sh
|
||||
sudo bash cmake-3.17.1-Linux-x86_64.sh --prefix=/usr/local --skip-license
|
||||
|
||||
Reference in New Issue
Block a user