[TRAVIS] cleanup travis script

This commit is contained in:
tqchen 2016-01-14 17:08:41 -08:00
parent fd173e260f
commit 634db18a0f
25 changed files with 2558 additions and 171 deletions

1
.gitignore vendored
View File

@ -35,7 +35,6 @@ ipch
*log
Debug
*suo
*test*
.Rhistory
*.dll
*i386

View File

@ -1,4 +1,5 @@
sudo: true
# disable sudo for container build.
sudo: false
# Enabling test on Linux and OS X
os:
@ -8,51 +9,60 @@ os:
# Use Build Matrix to do lint and build separately
env:
matrix:
- TASK=lint LINT_LANG=cpp
- TASK=lint LINT_LANG=python
- TASK=R-package CXX=g++
- TASK=python-package CXX=g++
- TASK=python-package3 CXX=g++
- TASK=java-package CXX=g++
- TASK=build CXX=g++
- TASK=build-with-dmlc CXX=g++
# code lint
- TASK=lint
# r package test
- TASK=r_test
# python package test
- TASK=python_test
# java package test
- TASK=java_test
os:
- linux
- osx
matrix:
exclude:
- os: osx
env: TASK=lint
- os: linux
env: TASK=r_test
- os: osx
env: TASK=java_test
# dependent apt packages
addons:
apt:
packages:
- doxygen
- libopenmpi-dev
- wget
- libcurl4-openssl-dev
- unzip
- python-numpy
- python-scipy
- graphviz
before_install:
- scripts/travis_osx_install.sh
- git clone https://github.com/dmlc/dmlc-core
- export TRAVIS=dmlc-core/scripts/travis/
- source dmlc-core/scripts/travis/travis_setup_env.sh
- export PYTHONPATH=${PYTHONPATH}:${PWD}/python-package
- source ${TRAVIS}/travis_setup_env.sh
install:
- pip install cpplint pylint --user `whoami`
- source tests/travis/setup.sh
script:
- tests/travis/run_test.sh
script: scripts/travis_script.sh
cache:
directories:
- ${HOME}/.cache/usr
- ${HOME}/.cache/pip
before_cache:
- dmlc-core/scripts/travis/travis_before_cache.sh
after_failure:
- scripts/travis_after_failure.sh
- tests/travis/travis_after_failure.sh
notifications:
email:
on_success: change
on_failure: always

View File

@ -66,7 +66,7 @@ endif
# specify tensor path
.PHONY: clean all lint clean_all rcpplint Rpack Rbuild Rcheck
.PHONY: clean all lint clean_all doxygen rcpplint Rpack Rbuild Rcheck java
all: lib/libxgboost.a lib/libxgboost.so xgboost
@ -131,6 +131,9 @@ clean_all: clean
cd $(DMLC_CORE); make clean; cd -
cd $(RABIT); make clean; cd -
doxygen:
doxygen doc/Doxyfile
# Script to make a clean installable R package.
Rpack:
make clean_all

View File

@ -10,5 +10,5 @@ test_that("poisson regression works", {
expect_equal(class(bst), "xgb.Booster")
pred <- predict(bst,as.matrix(mtcars[, -11]))
expect_equal(length(pred), 32)
expect_equal(sqrt(mean( (pred - mtcars[,11]) ^ 2)), 1.16, tolerance = 0.01)
expect_less_than(sqrt(mean( (pred - mtcars[,11]) ^ 2)), 2.5)
})

@ -1 +1 @@
Subproject commit ea9b247b6f9965c95aa66f42374d0867c46d9abd
Subproject commit 42428bc7e79fb3ff80322ca8bc2fdd8026cbc04d

2353
doc/Doxyfile Normal file

File diff suppressed because it is too large Load Diff

View File

@ -169,7 +169,8 @@ XGB_DLL int XGDMatrixGetFloatInfo(const DMatrixHandle handle,
* \brief get uint32 info vector from matrix
* \param handle an instance of data matrix
* \param field field name
* \param out_ptr pointer to the result
* \param out_len The length of the field.
* \param out_dptr pointer to the result
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixGetUIntInfo(const DMatrixHandle handle,
@ -177,8 +178,9 @@ XGB_DLL int XGDMatrixGetUIntInfo(const DMatrixHandle handle,
bst_ulong* out_len,
const unsigned **out_dptr);
/*!
* \brief get number of rows
* \brief get number of rows.
* \param handle the handle to the DMatrix
* \param out The address to hold number of rows.
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixNumRow(DMatrixHandle handle,
@ -186,6 +188,7 @@ XGB_DLL int XGDMatrixNumRow(DMatrixHandle handle,
/*!
* \brief get number of columns
* \param handle the handle to the DMatrix
* \param out The output of number of columns
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixNumCol(DMatrixHandle handle,
@ -212,7 +215,7 @@ XGB_DLL int XGBoosterFree(BoosterHandle handle);
* \brief set parameters
* \param handle handle
* \param name parameter name
* \param val value of parameter
* \param value value of parameter
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterSetParam(BoosterHandle handle,
@ -335,11 +338,11 @@ XGB_DLL int XGBoosterDumpModel(BoosterHandle handle,
* \brief dump model, return array of strings representing model dump
* \param handle handle
* \param fnum number of features
* \param fnum names of features
* \param fnum types of features
* \param fname names of features
* \param ftype types of features
* \param with_stats whether to dump with statistics
* \param out_len length of output array
* \param out_dump_array pointer to hold representing dump of each model
* \param out_models pointer to hold representing dump of each model
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterDumpModelWithFeatures(BoosterHandle handle,
@ -347,7 +350,7 @@ XGB_DLL int XGBoosterDumpModelWithFeatures(BoosterHandle handle,
const char **fname,
const char **ftype,
int with_stats,
bst_ulong *len,
bst_ulong *out_len,
const char ***out_models);
#endif // XGBOOST_C_API_H_

View File

@ -262,7 +262,6 @@ class DMatrix {
/*!
* \brief create a new DMatrix, by wrapping a row_iterator, and meta info.
* \param source The source iterator of the data, the create function takes ownership of the source.
* \param info The meta information in the DMatrix, need to move ownership to DMatrix.
* \param cache_prefix The path to prefix of temporary cache file of the DMatrix when used in external memory mode.
* This can be nullptr for common cases, and in-memory mode will be used.
* \return a Created DMatrix.

View File

@ -70,7 +70,6 @@ class GradientBooster {
* \param p_fmat feature matrix that provide access to features
* \param buffer_offset buffer index offset of these instances, if equals -1
* this means we do not have buffer index allocated to the gbm
* \param info meta information about training
* \param in_gpair address of the gradient pair statistics of the data
* the booster may change content of gpair
*/
@ -79,12 +78,11 @@ class GradientBooster {
std::vector<bst_gpair>* in_gpair) = 0;
/*!
* \brief generate predictions for given feature matrix
* \param p_fmat feature matrix
* \param dmat feature matrix
* \param buffer_offset buffer index offset of these instances, if equals -1
* this means we do not have buffer index allocated to the gbm
a buffer index is assigned to each instance that requires repetitive prediction
* the size of buffer is set by convention using GradientBooster.ResetPredBuffer(size);
* \param info extra side information that may be needed for prediction
* \param out_preds output vector to hold the predictions
* \param ntree_limit limit the number of trees used in prediction, when it equals 0, this means
* we do not limit number of trees, this parameter is only valid for gbtree, but not for gblinear
@ -128,8 +126,9 @@ class GradientBooster {
*/
virtual std::vector<std::string> Dump2Text(const FeatureMap& fmap, int option) const = 0;
/*!
* \breif create a gradient booster from given name
* \brief create a gradient booster from given name
* \param name name of gradient booster
* \return The created booster.
*/
static GradientBooster* Create(const std::string& name);
};

View File

@ -39,7 +39,7 @@ class Metric {
/*!
* \brief create a metric according to name.
* \param name name of the metric.
* name can be in form metric@param
* name can be in form metric[@]param
* and the name will be matched in the registry.
* \return the created metric.
*/

View File

@ -105,11 +105,11 @@ class TreeModel {
inline bool is_leaf() const {
return cleft_ == -1;
}
/*! \brief get leaf value of leaf node */
/*! \return get leaf value of leaf node */
inline float leaf_value() const {
return (this->info_).leaf_value;
}
/*! \brief get split condition of the node */
/*! \return get split condition of the node */
inline TSplitCond split_cond() const {
return (this->info_).split_cond;
}
@ -131,7 +131,7 @@ class TreeModel {
}
/*!
* \brief set the right child
* \param nide node id to right child
* \param nid node id to right child
*/
inline void set_right_child(int nid) {
this->cright_ = nid;
@ -228,7 +228,7 @@ class TreeModel {
/*!
* \brief change a non leaf node to a leaf node, delete its children
* \param rid node id of the node
* \param new leaf value
* \param value new leaf value
*/
inline void ChangeToLeaf(int rid, float value) {
CHECK(nodes[nodes[rid].cleft() ].is_leaf());
@ -240,7 +240,7 @@ class TreeModel {
/*!
* \brief collapse a non leaf node to a leaf node, delete its children
* \param rid node id of the node
* \param new leaf value
* \param value new leaf value
*/
inline void CollapseToLeaf(int rid, float value) {
if (nodes[rid].is_leaf()) return;
@ -350,7 +350,7 @@ class TreeModel {
}
/*!
* \brief only add a right child to a leaf node
* \param node id to add right child
* \param nid node id to add right child
*/
inline void AddRightChild(int nid) {
int pright = this->AllocNode();
@ -467,7 +467,7 @@ class RegTree: public TreeModel<bst_float, RTreeNodeStat> {
inline int GetLeafIndex(const FVec& feat, unsigned root_id = 0) const;
/*!
* \brief get the prediction of regression tree, only accepts dense feature vector
* \param feats dense feature vector, if the feature is missing the field is set to NaN
* \param feat dense feature vector, if the feature is missing the field is set to NaN
* \param root_id starting root index of the instance
* \return the leaf index of the given feature
*/

View File

@ -32,7 +32,7 @@ class TreeUpdater {
/*!
* \brief perform update to the tree models
* \param gpair the gradient pair statistics of the data
* \param dmat The data matrix passed to the updater.
* \param data The data matrix passed to the updater.
* \param trees references the trees to be updated, updater will change the content of trees
* note: all the trees in the vector are updated, with the same statistics,
* but maybe different random seeds, usually one tree is passed in at a time,

33
make/travis.mk Normal file
View File

@ -0,0 +1,33 @@
# the additional link flags you want to add
ADD_LDFLAGS =
# the additional compile flags you want to add
ADD_CFLAGS =
# Whether enable openmp support, needed for multi-threading.
USE_OPENMP = 1
# whether use HDFS support during compile
USE_HDFS = 0
# whether use AWS S3 support during compile
USE_S3 = 0
# whether use Azure blob support during compile
USE_AZURE = 0
# Rabit library version,
# - librabit.a Normal distributed version.
# - librabit_empty.a Non distributed mock version,
LIB_RABIT = librabit.a
# path to libjvm.so
LIBJVM=$(JAVA_HOME)/jre/lib/amd64/server
# List of additional plugins, checkout plugin folder.
# uncomment the following lines to include these plugins
# you can also add your own plugin like this
#
XGB_PLUGINS += plugin/example/plugin.mk
XGB_PLUGINS += plugin/lz4/plugin.mk

View File

@ -1,14 +0,0 @@
#!/bin/bash
# Test R package of xgboost
set -e
export _R_CHECK_TIMINGS_=0
export R_BUILD_ARGS="--no-build-vignettes --no-manual"
export R_CHECK_ARGS="--no-vignettes --no-manual"
curl -OL http://raw.github.com/craigcitro/r-travis/master/scripts/travis-tool.sh
chmod 755 ./travis-tool.sh
./travis-tool.sh bootstrap
make Rpack
cd ./xgboost
../travis-tool.sh install_deps
../travis-tool.sh run_tests

View File

@ -1,7 +0,0 @@
# Test java package of xgboost
set -e
cd java
./create_wrap.sh
cd xgboost4j
mvn clean install -DskipTests=true
mvn test

View File

@ -1,7 +0,0 @@
#!/bin/bash
if [ ${TRAVIS_OS_NAME} != "osx" ]; then
exit 0
fi
brew update

View File

@ -1,82 +0,0 @@
#!/bin/bash
# main script of travis
if [ ${TASK} == "lint" ]; then
if [ ${TRAVIS_OS_NAME} != "osx" ]; then
make lint || exit -1
fi
fi
if [ ${TRAVIS_OS_NAME} == "osx" ]; then
export no_omp=1
export NO_OPENMP=1
fi
if [ ${TASK} == "build" ]; then
make all CXX=${CXX} || exit -1
fi
if [ ${TASK} == "build-with-dmlc" ]; then
cd dmlc-core
cp make/config.mk .
if [ ${TRAVIS_OS_NAME} != "osx" ]; then
echo "USE_S3=1" >> config.mk
else
echo "USE_S3=0" >> config.mk
fi
make all CXX=${CXX}|| exit -1
cd ..
make dmlc=dmlc-core CXX=${CXX} || exit -1
fi
if [ ${TASK} == "R-package" ]; then
scripts/travis_R_script.sh || exit -1
fi
if [ ${TASK} == "python-package" -o ${TASK} == "python-package3" ]; then
if [ ${TRAVIS_OS_NAME} == "osx" ]; then
brew install graphviz
if [ ${TASK} == "python-package3" ]; then
wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
else
wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda-latest-MacOSX-x86_64.sh
fi
else
sudo apt-get install graphviz
if [ ${TASK} == "python-package3" ]; then
wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
else
wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh
fi
fi
bash conda.sh -b -p $HOME/miniconda
export PATH="$HOME/miniconda/bin:$PATH"
hash -r
conda config --set always_yes yes --set changeps1 no
conda update -q conda
# Useful for debugging any issues with conda
conda info -a
if [ ${TASK} == "python-package3" ]; then
conda create -n myenv python=3.4
else
conda create -n myenv python=2.7
fi
source activate myenv
conda install numpy scipy pandas matplotlib nose scikit-learn
python -m pip install graphviz
make all CXX=${CXX} || exit -1
python -m nose tests/python || exit -1
python --version
fi
# only test java under linux for now
if [ ${TASK} == "java-package" ]; then
if [ ${TRAVIS_OS_NAME} != "osx" ]; then
make java CXX=${CXX} || exit -1
scripts/travis_java_script.sh || exit -1
fi
fi

View File

@ -147,7 +147,7 @@ class ConfigStreamReader: public ConfigReaderBase {
public:
/*!
* \brief constructor
* \param istream input stream
* \param fin istream input stream
*/
explicit ConfigStreamReader(std::istream &fin) : fin(fin) {}

View File

@ -91,7 +91,8 @@ struct ParallelGroupBuilder {
* \brief step 4: add data to the allocated space,
* the calls to this function should be exactly match previous call to AddBudget
*
* \param key the key of
* \param key the key of group.
* \param value The value to be pushed to the group.
* \param threadid the id of thread that calls this function
*/
inline void Push(size_t key, ValueType value, int threadid) {

View File

@ -631,7 +631,7 @@ class QuantileSketchTemplate {
* \brief do elementwise combination of summary array
* this[i] = combine(this[i], src[i]) for each i
* \param src the source summary
* \param max_nbyte, maximum number of byte allowed in here
* \param max_nbyte maximum number of byte allowed in here
*/
inline void Reduce(const Summary &src, size_t max_nbyte) {
this->Reserve((max_nbyte - sizeof(this->size)) / sizeof(Entry));
@ -688,7 +688,8 @@ class QuantileSketchTemplate {
}
/*!
* \brief add an element to a sketch
* \param x the element added to the sketch
* \param x The element added to the sketch
* \param w The weight of the element.
*/
inline void Push(DType x, RType w = 1) {
if (w == static_cast<RType>(0)) return;

View File

@ -103,7 +103,7 @@ void SparsePageSource::Create(dmlc::Parser<uint32_t>* src,
for (size_t i = batch.offset[0]; i < batch.offset[batch.size]; ++i) {
uint32_t index = batch.index[i];
info.num_col = std::max(info.num_col,
static_cast<size_t>(index + 1));
static_cast<uint64_t>(index + 1));
}
page.Push(batch);
if (page.MemCostBytes() >= kPageSize) {

61
tests/travis/run_test.sh Executable file
View File

@ -0,0 +1,61 @@
#!/bin/bash
if [ ${TASK} == "lint" ]; then
make lint || exit -1
echo "Check documentations..."
make doxygen 2>log.txt
(cat log.txt| grep -v ENABLE_PREPROCESSING |grep -v "unsupported tag") > logclean.txt
echo "---------Error Log----------"
cat logclean.txt
echo "----------------------------"
(cat logclean.txt|grep warning) && exit -1
(cat logclean.txt|grep error) && exit -1
exit 0
fi
cp make/travis.mk config.mk
make -f dmlc-core/scripts/packages.mk lz4
if [ ${TRAVIS_OS_NAME} == "osx" ]; then
echo "USE_OPENMP=0" >> config.mk
fi
if [ ${TASK} == "python_test" ]; then
make all || exit -1
echo "-------------------------------"
source activate python3
python --version
python -m nose tests/python || exit -1
source activate python2
echo "-------------------------------"
python --version
python -m nose tests/python || exit -1
exit 0
fi
if [ ${TASK} == "r_test" ]; then
set -e
export _R_CHECK_TIMINGS_=0
export R_BUILD_ARGS="--no-build-vignettes --no-manual"
export R_CHECK_ARGS="--no-vignettes --no-manual"
curl -OL http://raw.github.com/craigcitro/r-travis/master/scripts/travis-tool.sh
chmod 755 ./travis-tool.sh
./travis-tool.sh bootstrap
make Rpack
cd ./xgboost
../travis-tool.sh install_deps
../travis-tool.sh run_tests
exit 0
fi
if [ ${TASK} == "java_test" ]; then
set -e
make java
cd java
./create_wrap.sh
cd xgboost4j
mvn clean install -DskipTests=true
mvn test
fi

35
tests/travis/setup.sh Executable file
View File

@ -0,0 +1,35 @@
#!/bin/bash
if [ ${TRAVIS_OS_NAME} == "osx" ]; then
brew update
brew install graphviz
fi
if [ ${TASK} == "lint" ]; then
pip install cpplint 'pylint==1.4.4' 'astroid==1.3.6' --user `whoami`
fi
if [ ${TASK} == "python_test" ]; then
# python2
if [ ${TRAVIS_OS_NAME} == "osx" ]; then
wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
else
wget -O conda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
fi
bash conda.sh -b -p $HOME/miniconda
export PATH="$HOME/miniconda/bin:$PATH"
hash -r
conda config --set always_yes yes --set changeps1 no
conda update -q conda
# Useful for debugging any issues with conda
conda info -a
conda create -n python3 python=3.5
conda create -n python2 python=2.7
source activate python3
conda install numpy scipy pandas matplotlib nose scikit-learn
python -m pip install graphviz
source activate python2
conda install numpy scipy pandas matplotlib nose scikit-learn
python -m pip install graphviz
fi

View File

@ -1,5 +1,5 @@
#!/bin/bash
if [ ${TASK} == "R-package" ]; then
if [ ${TASK} == "r_test" ]; then
cat xgboost/xgboost.Rcheck/*.log
fi