rename XgboostError to XGBoostError

Author: yanqingmen
Date: 2015-07-06 17:55:13 -07:00
Parent: f73bcd427d
Commit: 4d382a8cc1
15 changed files with 111 additions and 111 deletions

@ -31,7 +31,7 @@ import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.demo.util.DataLoader;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* a simple example of java wrapper for xgboost
@ -53,7 +53,7 @@ public class BasicWalkThrough {
}
public static void main(String[] args) throws UnsupportedEncodingException, IOException, XgboostError {
public static void main(String[] args) throws UnsupportedEncodingException, IOException, XGBoostError {
// load file from text file, also binary buffer generated by xgboost4j
DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");
DMatrix testMat = new DMatrix("../../demo/data/agaricus.txt.test");

@ -23,14 +23,14 @@ import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* example for start from a initial base prediction
* @author hzx
*/
public class BoostFromPrediction {
public static void main(String[] args) throws XgboostError {
public static void main(String[] args) throws XGBoostError {
System.out.println("start running example to start from a initial prediction");
// load file from text file, also binary buffer generated by xgboost4j

@ -19,14 +19,14 @@ import java.io.IOException;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* an example of cross validation
* @author hzx
*/
public class CrossValidation {
public static void main(String[] args) throws IOException, XgboostError {
public static void main(String[] args) throws IOException, XGBoostError {
//load train mat
DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");

@ -27,7 +27,7 @@ import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.IObjective;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* an example user define objective and eval
@ -74,7 +74,7 @@ public class CustomObjective {
float[] labels;
try {
labels = dtrain.getLabel();
} catch (XgboostError ex) {
} catch (XGBoostError ex) {
logger.error(ex);
return null;
}
@ -122,7 +122,7 @@ public class CustomObjective {
float[] labels;
try {
labels = dmat.getLabel();
} catch (XgboostError ex) {
} catch (XGBoostError ex) {
logger.error(ex);
return -1f;
}
@ -140,7 +140,7 @@ public class CustomObjective {
}
}
public static void main(String[] args) throws XgboostError {
public static void main(String[] args) throws XGBoostError {
//load train mat (svmlight format)
DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");
//load valid mat (svmlight format)

@ -23,14 +23,14 @@ import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* simple example for using external memory version
* @author hzx
*/
public class ExternalMemory {
public static void main(String[] args) throws XgboostError {
public static void main(String[] args) throws XGBoostError {
//this is the only difference, add a # followed by a cache prefix name
//several cache file with the prefix will be generated
//currently only support convert from libsvm file

@ -24,7 +24,7 @@ import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.demo.util.CustomEval;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* this is an example of fit generalized linear model in xgboost
@ -32,7 +32,7 @@ import org.dmlc.xgboost4j.util.XgboostError;
* @author hzx
*/
public class GeneralizedLinearModel {
public static void main(String[] args) throws XgboostError {
public static void main(String[] args) throws XGBoostError {
// load file from text file, also binary buffer generated by xgboost4j
DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");
DMatrix testMat = new DMatrix("../../demo/data/agaricus.txt.test");

@ -25,14 +25,14 @@ import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.demo.util.CustomEval;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* predict first ntree
* @author hzx
*/
public class PredictFirstNtree {
public static void main(String[] args) throws XgboostError {
public static void main(String[] args) throws XGBoostError {
// load file from text file, also binary buffer generated by xgboost4j
DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");
DMatrix testMat = new DMatrix("../../demo/data/agaricus.txt.test");

@ -24,14 +24,14 @@ import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.demo.util.Params;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* predict leaf indices
* @author hzx
*/
public class PredictLeafIndices {
public static void main(String[] args) throws XgboostError {
public static void main(String[] args) throws XGBoostError {
// load file from text file, also binary buffer generated by xgboost4j
DMatrix trainMat = new DMatrix("../../demo/data/agaricus.txt.train");
DMatrix testMat = new DMatrix("../../demo/data/agaricus.txt.test");

@ -19,7 +19,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.IEvaluation;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
/**
* a util evaluation class for examples
@ -41,7 +41,7 @@ public class CustomEval implements IEvaluation {
float[] labels;
try {
labels = dmat.getLabel();
} catch (XgboostError ex) {
} catch (XGBoostError ex) {
logger.error(ex);
return -1f;
}

@ -31,7 +31,7 @@ import org.apache.commons.logging.LogFactory;
import org.dmlc.xgboost4j.util.Initializer;
import org.dmlc.xgboost4j.util.ErrorHandle;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
import org.dmlc.xgboost4j.wrapper.XgboostJNI;
@ -59,9 +59,9 @@ public final class Booster {
* init Booster from dMatrixs
* @param params parameters
* @param dMatrixs DMatrix array
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public Booster(Iterable<Entry<String, Object>> params, DMatrix[] dMatrixs) throws XgboostError {
public Booster(Iterable<Entry<String, Object>> params, DMatrix[] dMatrixs) throws XGBoostError {
init(dMatrixs);
setParam("seed","0");
setParams(params);
@ -73,9 +73,9 @@ public final class Booster {
* load model from modelPath
* @param params parameters
* @param modelPath booster modelPath (model generated by booster.saveModel)
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public Booster(Iterable<Entry<String, Object>> params, String modelPath) throws XgboostError {
public Booster(Iterable<Entry<String, Object>> params, String modelPath) throws XGBoostError {
long[] out = new long[1];
init(null);
loadModel(modelPath);
@ -86,7 +86,7 @@ public final class Booster {
private void init(DMatrix[] dMatrixs) throws XgboostError {
private void init(DMatrix[] dMatrixs) throws XGBoostError {
long[] handles = null;
if(dMatrixs != null) {
handles = dMatrixs2handles(dMatrixs);
@ -101,18 +101,18 @@ public final class Booster {
* set parameter
* @param key param name
* @param value param value
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public final void setParam(String key, String value) throws XgboostError {
public final void setParam(String key, String value) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGBoosterSetParam(handle, key, value));
}
/**
* set parameters
* @param params parameters key-value map
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void setParams(Iterable<Entry<String, Object>> params) throws XgboostError {
public void setParams(Iterable<Entry<String, Object>> params) throws XGBoostError {
if(params!=null) {
for(Map.Entry<String, Object> entry : params) {
setParam(entry.getKey(), entry.getValue().toString());
@ -125,9 +125,9 @@ public final class Booster {
* Update (one iteration)
* @param dtrain training data
* @param iter current iteration number
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void update(DMatrix dtrain, int iter) throws XgboostError {
public void update(DMatrix dtrain, int iter) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGBoosterUpdateOneIter(handle, iter, dtrain.getHandle()));
}
@ -136,9 +136,9 @@ public final class Booster {
* @param dtrain training data
* @param iter current iteration number
* @param obj customized objective class
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void update(DMatrix dtrain, int iter, IObjective obj) throws XgboostError {
public void update(DMatrix dtrain, int iter, IObjective obj) throws XGBoostError {
float[][] predicts = predict(dtrain, true);
List<float[]> gradients = obj.getGradient(predicts, dtrain);
boost(dtrain, gradients.get(0), gradients.get(1));
@ -149,9 +149,9 @@ public final class Booster {
* @param dtrain training data
* @param grad first order of gradient
* @param hess seconde order of gradient
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void boost(DMatrix dtrain, float[] grad, float[] hess) throws XgboostError {
public void boost(DMatrix dtrain, float[] grad, float[] hess) throws XGBoostError {
if(grad.length != hess.length) {
throw new AssertionError(String.format("grad/hess length mismatch %s / %s", grad.length, hess.length));
}
@ -164,9 +164,9 @@ public final class Booster {
* @param evalNames name for eval dmatrixs, used for check results
* @param iter current eval iteration
* @return eval information
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String evalSet(DMatrix[] evalMatrixs, String[] evalNames, int iter) throws XgboostError {
public String evalSet(DMatrix[] evalMatrixs, String[] evalNames, int iter) throws XGBoostError {
long[] handles = dMatrixs2handles(evalMatrixs);
String[] evalInfo = new String[1];
ErrorHandle.checkCall(XgboostJNI.XGBoosterEvalOneIter(handle, iter, handles, evalNames, evalInfo));
@ -180,9 +180,9 @@ public final class Booster {
* @param iter
* @param eval
* @return eval information
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String evalSet(DMatrix[] evalMatrixs, String[] evalNames, int iter, IEvaluation eval) throws XgboostError {
public String evalSet(DMatrix[] evalMatrixs, String[] evalNames, int iter, IEvaluation eval) throws XGBoostError {
String evalInfo = "";
for(int i=0; i<evalNames.length; i++) {
String evalName = evalNames[i];
@ -200,9 +200,9 @@ public final class Booster {
* @param evalNames name for eval dmatrixs, used for check results
* @param iter current eval iteration
* @return eval information
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String evalSet(long[] dHandles, String[] evalNames, int iter) throws XgboostError {
public String evalSet(long[] dHandles, String[] evalNames, int iter) throws XGBoostError {
String[] evalInfo = new String[1];
ErrorHandle.checkCall(XgboostJNI.XGBoosterEvalOneIter(handle, iter, dHandles, evalNames, evalInfo));
return evalInfo[0];
@ -215,9 +215,9 @@ public final class Booster {
* @param evalName
* @param iter
* @return eval information
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String eval(DMatrix evalMat, String evalName, int iter) throws XgboostError {
public String eval(DMatrix evalMat, String evalName, int iter) throws XGBoostError {
DMatrix[] evalMats = new DMatrix[] {evalMat};
String[] evalNames = new String[] {evalName};
return evalSet(evalMats, evalNames, iter);
@ -231,7 +231,7 @@ public final class Booster {
* @param predLeaf
* @return predict results
*/
private synchronized float[][] pred(DMatrix data, boolean outPutMargin, long treeLimit, boolean predLeaf) throws XgboostError {
private synchronized float[][] pred(DMatrix data, boolean outPutMargin, long treeLimit, boolean predLeaf) throws XGBoostError {
int optionMask = 0;
if(outPutMargin) {
optionMask = 1;
@ -257,9 +257,9 @@ public final class Booster {
* Predict with data
* @param data dmatrix storing the input
* @return predict result
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[][] predict(DMatrix data) throws XgboostError {
public float[][] predict(DMatrix data) throws XGBoostError {
return pred(data, false, 0, false);
}
@ -268,9 +268,9 @@ public final class Booster {
* @param data dmatrix storing the input
* @param outPutMargin Whether to output the raw untransformed margin value.
* @return predict result
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[][] predict(DMatrix data, boolean outPutMargin) throws XgboostError {
public float[][] predict(DMatrix data, boolean outPutMargin) throws XGBoostError {
return pred(data, outPutMargin, 0, false);
}
@ -280,9 +280,9 @@ public final class Booster {
* @param outPutMargin Whether to output the raw untransformed margin value.
* @param treeLimit Limit number of trees in the prediction; defaults to 0 (use all trees).
* @return predict result
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[][] predict(DMatrix data, boolean outPutMargin, long treeLimit) throws XgboostError {
public float[][] predict(DMatrix data, boolean outPutMargin, long treeLimit) throws XGBoostError {
return pred(data, outPutMargin, treeLimit, false);
}
@ -295,9 +295,9 @@ public final class Booster {
Note that the leaf index of a tree is unique per tree, so you may find leaf 1
in both tree 1 and tree 0.
* @return predict result
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[][] predict(DMatrix data , long treeLimit, boolean predLeaf) throws XgboostError {
public float[][] predict(DMatrix data , long treeLimit, boolean predLeaf) throws XGBoostError {
return pred(data, false, treeLimit, predLeaf);
}
@ -317,9 +317,9 @@ public final class Booster {
* get the dump of the model as a string array
* @param withStats Controls whether the split statistics are output.
* @return dumped model information
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String[] getDumpInfo(boolean withStats) throws XgboostError {
public String[] getDumpInfo(boolean withStats) throws XGBoostError {
int statsFlag = 0;
if(withStats) {
statsFlag = 1;
@ -334,9 +334,9 @@ public final class Booster {
* @param featureMap featureMap file
* @param withStats Controls whether the split statistics are output.
* @return dumped model information
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String[] getDumpInfo(String featureMap, boolean withStats) throws XgboostError {
public String[] getDumpInfo(String featureMap, boolean withStats) throws XGBoostError {
int statsFlag = 0;
if(withStats) {
statsFlag = 1;
@ -354,9 +354,9 @@ public final class Booster {
* @throws FileNotFoundException
* @throws UnsupportedEncodingException
* @throws IOException
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void dumpModel(String modelPath, boolean withStats) throws FileNotFoundException, UnsupportedEncodingException, IOException, XgboostError {
public void dumpModel(String modelPath, boolean withStats) throws FileNotFoundException, UnsupportedEncodingException, IOException, XGBoostError {
File tf = new File(modelPath);
FileOutputStream out = new FileOutputStream(tf);
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
@ -381,9 +381,9 @@ public final class Booster {
* @throws FileNotFoundException
* @throws UnsupportedEncodingException
* @throws IOException
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void dumpModel(String modelPath, String featureMap, boolean withStats) throws FileNotFoundException, UnsupportedEncodingException, IOException, XgboostError {
public void dumpModel(String modelPath, String featureMap, boolean withStats) throws FileNotFoundException, UnsupportedEncodingException, IOException, XGBoostError {
File tf = new File(modelPath);
FileOutputStream out = new FileOutputStream(tf);
BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(out, "UTF-8"));
@ -402,9 +402,9 @@ public final class Booster {
/**
* get importance of each feature
* @return featureMap key: feature index, value: feature importance score
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public Map<String, Integer> getFeatureScore() throws XgboostError {
public Map<String, Integer> getFeatureScore() throws XGBoostError {
String[] modelInfos = getDumpInfo(false);
Map<String, Integer> featureScore = new HashMap<>();
for(String tree : modelInfos) {
@ -431,9 +431,9 @@ public final class Booster {
* get importance of each feature
* @param featureMap file to save dumped model info
* @return featureMap key: feature index, value: feature importance score
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public Map<String, Integer> getFeatureScore(String featureMap) throws XgboostError {
public Map<String, Integer> getFeatureScore(String featureMap) throws XGBoostError {
String[] modelInfos = getDumpInfo(featureMap, false);
Map<String, Integer> featureScore = new HashMap<>();
for(String tree : modelInfos) {

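For reference, a minimal caller-side sketch against the Booster signatures shown in the hunks above; it assumes only what this diff exposes, and the class name, data path, and parameter values are hypothetical:

import java.util.HashMap;
import java.util.Map;
import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class BoosterRenameSketch {                           // hypothetical class name
    public static void main(String[] args) {
        try {
            Map<String, Object> params = new HashMap<>();
            params.put("eta", 0.1);                          // hypothetical parameter values
            params.put("objective", "binary:logistic");
            DMatrix dtrain = new DMatrix("train.svm.txt");   // hypothetical path
            Booster booster = new Booster(params.entrySet(), new DMatrix[]{dtrain});
            for (int iter = 0; iter < 2; iter++) {
                booster.update(dtrain, iter);                // now declares throws XGBoostError
            }
            float[][] preds = booster.predict(dtrain);
            System.out.println("rows predicted: " + preds.length);
        } catch (XGBoostError ex) {                          // renamed from XgboostError
            System.err.println("xgboost call failed: " + ex.getMessage());
        }
    }
}
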
@ -19,7 +19,7 @@ import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.dmlc.xgboost4j.util.ErrorHandle;
import org.dmlc.xgboost4j.util.XgboostError;
import org.dmlc.xgboost4j.util.XGBoostError;
import org.dmlc.xgboost4j.util.Initializer;
import org.dmlc.xgboost4j.wrapper.XgboostJNI;
@ -52,9 +52,9 @@ public class DMatrix {
/**
* init DMatrix from file (svmlight format)
* @param dataPath
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public DMatrix(String dataPath) throws XgboostError {
public DMatrix(String dataPath) throws XGBoostError {
long[] out = new long[1];
ErrorHandle.checkCall(XgboostJNI.XGDMatrixCreateFromFile(dataPath, 1, out));
handle = out[0];
@ -66,9 +66,9 @@ public class DMatrix {
* @param indices Indices (colIndexs for CSR or rowIndexs for CSC)
* @param data non zero values (sequence by row for CSR or by col for CSC)
* @param st sparse matrix type (CSR or CSC)
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public DMatrix(long[] headers, int[] indices, float[] data, SparseType st) throws XgboostError {
public DMatrix(long[] headers, int[] indices, float[] data, SparseType st) throws XGBoostError {
long[] out = new long[1];
if(st == SparseType.CSR) {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixCreateFromCSR(headers, indices, data, out));
@ -87,9 +87,9 @@ public class DMatrix {
* @param data data values
* @param nrow number of rows
* @param ncol number of columns
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public DMatrix(float[] data, int nrow, int ncol) throws XgboostError {
public DMatrix(float[] data, int nrow, int ncol) throws XGBoostError {
long[] out = new long[1];
ErrorHandle.checkCall(XgboostJNI.XGDMatrixCreateFromMat(data, nrow, ncol, 0.0f, out));
handle = out[0];
@ -109,16 +109,16 @@ public class DMatrix {
* set label of dmatrix
* @param labels
*/
public void setLabel(float[] labels) throws XgboostError {
public void setLabel(float[] labels) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetFloatInfo(handle, "label", labels));
}
/**
* set weight of each instance
* @param weights
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void setWeight(float[] weights) throws XgboostError {
public void setWeight(float[] weights) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetFloatInfo(handle, "weight", weights));
}
@ -126,9 +126,9 @@ public class DMatrix {
* if specified, xgboost will start from this init margin
* can be used to specify initial prediction to boost from
* @param baseMargin
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void setBaseMargin(float[] baseMargin) throws XgboostError {
public void setBaseMargin(float[] baseMargin) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetFloatInfo(handle, "base_margin", baseMargin));
}
@ -136,9 +136,9 @@ public class DMatrix {
* if specified, xgboost will start from this init margin
* can be used to specify initial prediction to boost from
* @param baseMargin
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void setBaseMargin(float[][] baseMargin) throws XgboostError {
public void setBaseMargin(float[][] baseMargin) throws XGBoostError {
float[] flattenMargin = flatten(baseMargin);
setBaseMargin(flattenMargin);
}
@ -146,19 +146,19 @@ public class DMatrix {
/**
* Set group sizes of DMatrix (used for ranking)
* @param group
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void setGroup(int[] group) throws XgboostError {
public void setGroup(int[] group) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetGroup(handle, group));
}
private float[] getFloatInfo(String field) throws XgboostError {
private float[] getFloatInfo(String field) throws XGBoostError {
float[][] infos = new float[1][];
ErrorHandle.checkCall(XgboostJNI.XGDMatrixGetFloatInfo(handle, field, infos));
return infos[0];
}
private int[] getIntInfo(String field) throws XgboostError {
private int[] getIntInfo(String field) throws XGBoostError {
int[][] infos = new int[1][];
ErrorHandle.checkCall(XgboostJNI.XGDMatrixGetUIntInfo(handle, field, infos));
return infos[0];
@ -167,27 +167,27 @@ public class DMatrix {
/**
* get label values
* @return label
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[] getLabel() throws XgboostError {
public float[] getLabel() throws XGBoostError {
return getFloatInfo("label");
}
/**
* get weight of the DMatrix
* @return weights
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[] getWeight() throws XgboostError {
public float[] getWeight() throws XGBoostError {
return getFloatInfo("weight");
}
/**
* get base margin of the DMatrix
* @return base margin
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public float[] getBaseMargin() throws XgboostError {
public float[] getBaseMargin() throws XGBoostError {
return getFloatInfo("base_margin");
}
@ -195,9 +195,9 @@ public class DMatrix {
* Slice the DMatrix and return a new DMatrix that only contains `rowIndex`.
* @param rowIndex
* @return sliced new DMatrix
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public DMatrix slice(int[] rowIndex) throws XgboostError {
public DMatrix slice(int[] rowIndex) throws XGBoostError {
long[] out = new long[1];
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSliceDMatrix(handle, rowIndex, out));
long sHandle = out[0];
@ -208,9 +208,9 @@ public class DMatrix {
/**
* get the row number of DMatrix
* @return number of rows
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public long rowNum() throws XgboostError {
public long rowNum() throws XGBoostError {
long[] rowNum = new long[1];
ErrorHandle.checkCall(XgboostJNI.XGDMatrixNumRow(handle,rowNum));
return rowNum[0];

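Likewise, a small sketch for the DMatrix signatures above (dense constructor, setLabel, getLabel, rowNum); the class name and the toy data are hypothetical:

import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class DMatrixRenameSketch {                  // hypothetical class name
    public static void main(String[] args) {
        try {
            // 3 rows x 2 columns, row-major dense data
            float[] data = {1f, 2f, 3f, 4f, 5f, 6f};
            DMatrix dmat = new DMatrix(data, 3, 2); // constructor shown in the hunk above
            dmat.setLabel(new float[]{0f, 1f, 0f});
            System.out.println("rows: " + dmat.rowNum());
            System.out.println("first label: " + dmat.getLabel()[0]);
        } catch (XGBoostError ex) {                 // renamed exception type
            System.err.println("DMatrix call failed: " + ex.getMessage());
        }
    }
}
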
@ -37,9 +37,9 @@ public class CVPack {
* @param dtrain train data
* @param dtest test data
* @param params parameters
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public CVPack(DMatrix dtrain, DMatrix dtest, Iterable<Map.Entry<String, Object>> params) throws XgboostError {
public CVPack(DMatrix dtrain, DMatrix dtest, Iterable<Map.Entry<String, Object>> params) throws XGBoostError {
dmats = new DMatrix[] {dtrain, dtest};
booster = new Booster(params, dmats);
names = new String[] {"train", "test"};
@ -50,9 +50,9 @@ public class CVPack {
/**
* update one iteration
* @param iter iteration num
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void update(int iter) throws XgboostError {
public void update(int iter) throws XGBoostError {
booster.update(dtrain, iter);
}
@ -60,9 +60,9 @@ public class CVPack {
* update one iteration
* @param iter iteration num
* @param obj customized objective
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public void update(int iter, IObjective obj) throws XgboostError {
public void update(int iter, IObjective obj) throws XGBoostError {
booster.update(dtrain, iter, obj);
}
@ -70,9 +70,9 @@ public class CVPack {
* evaluation
* @param iter iteration num
* @return
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String eval(int iter) throws XgboostError {
public String eval(int iter) throws XGBoostError {
return booster.evalSet(dmats, names, iter);
}
@ -81,9 +81,9 @@ public class CVPack {
* @param iter iteration num
* @param eval customized eval
* @return
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public String eval(int iter, IEvaluation eval) throws XgboostError {
public String eval(int iter, IEvaluation eval) throws XGBoostError {
return booster.evalSet(dmats, names, iter, eval);
}
}

@ -40,11 +40,11 @@ public class ErrorHandle {
/**
* check the return value of C API
* @param ret return valud of xgboostJNI C API call
* @throws org.dmlc.xgboost4j.util.XgboostError
* @throws org.dmlc.xgboost4j.util.XGBoostError
*/
public static void checkCall(int ret) throws XgboostError {
public static void checkCall(int ret) throws XGBoostError {
if(ret != 0) {
throw new XgboostError(XgboostJNI.XGBGetLastError());
throw new XGBoostError(XgboostJNI.XGBGetLastError());
}
}
}
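
A hedged sketch of how checkCall surfaces native failures after the rename; it assumes a missing data file makes the native loader return a non-zero status, and the class name and path are hypothetical:

import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class CheckCallSketch {                         // hypothetical class name
    public static void main(String[] args) {
        try {
            // DMatrix delegates to ErrorHandle.checkCall internally; a failing native
            // call (non-zero return) is rethrown as XGBoostError carrying XGBGetLastError().
            new DMatrix("no/such/file.svm.txt");       // hypothetical missing path
        } catch (XGBoostError ex) {
            System.err.println("native error: " + ex.getMessage());
        }
    }
}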

@ -47,7 +47,7 @@ public class Trainer {
* @return trained booster
*/
public static Booster train(Iterable<Entry<String, Object>> params, DMatrix dtrain, int round,
Iterable<Entry<String, DMatrix>> watchs, IObjective obj, IEvaluation eval) throws XgboostError {
Iterable<Entry<String, DMatrix>> watchs, IObjective obj, IEvaluation eval) throws XGBoostError {
//collect eval matrixs
String[] evalNames;
@ -112,7 +112,7 @@ public class Trainer {
* @param eval customized evaluation (set to null if not used)
* @return evaluation history
*/
public static String[] crossValiation(Iterable<Entry<String, Object>> params, DMatrix data, int round, int nfold, String[] metrics, IObjective obj, IEvaluation eval) throws XgboostError {
public static String[] crossValiation(Iterable<Entry<String, Object>> params, DMatrix data, int round, int nfold, String[] metrics, IObjective obj, IEvaluation eval) throws XGBoostError {
CVPack[] cvPacks = makeNFold(data, nfold, params, metrics);
String[] evalHist = new String[round];
String[] results = new String[cvPacks.length];
@ -149,7 +149,7 @@ public class Trainer {
* @param evalMetrics Evaluation metrics
* @return CV package array
*/
public static CVPack[] makeNFold(DMatrix data, int nfold, Iterable<Entry<String, Object>> params, String[] evalMetrics) throws XgboostError {
public static CVPack[] makeNFold(DMatrix data, int nfold, Iterable<Entry<String, Object>> params, String[] evalMetrics) throws XGBoostError {
List<Integer> samples = genRandPermutationNums(0, (int) data.rowNum());
int step = samples.size()/nfold;
int[] testSlice = new int[step];

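A sketch of calling Trainer.train with the renamed exception, using only the signature visible above; the class name, data path, and parameter values are hypothetical, and passing null for obj/eval follows the "set to null if not used" note documented for crossValiation:

import java.util.AbstractMap;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.Trainer;
import org.dmlc.xgboost4j.util.XGBoostError;

public class TrainerRenameSketch {                              // hypothetical class name
    public static void main(String[] args) {
        try {
            DMatrix dtrain = new DMatrix("train.svm.txt");      // hypothetical path
            Map<String, Object> params = new HashMap<>();
            params.put("eta", 0.3);
            params.put("objective", "binary:logistic");
            List<Entry<String, DMatrix>> watchs = new ArrayList<>();
            watchs.add(new AbstractMap.SimpleEntry<>("train", dtrain));
            // train(...) now declares throws XGBoostError; obj and eval left unused (null)
            Booster booster = Trainer.train(params.entrySet(), dtrain, 10, watchs, null, null);
            System.out.println(booster.eval(dtrain, "train", 10));
        } catch (XGBoostError ex) {                             // renamed exception
            System.err.println("training failed: " + ex.getMessage());
        }
    }
}
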
@ -19,8 +19,8 @@ package org.dmlc.xgboost4j.util;
* custom error class for xgboost
* @author hzx
*/
public class XgboostError extends Exception{
public XgboostError(String message) {
public class XGBoostError extends Exception{
public XGBoostError(String message) {
super(message);
}
}
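
Finally, a self-contained sketch of the only change downstream code needs from this commit: catch blocks and throws clauses move from XgboostError to XGBoostError (the helper method and class name below are hypothetical):

import org.dmlc.xgboost4j.util.XGBoostError;

public class MigrationSketch {                       // hypothetical class name
    // hypothetical helper standing in for any wrapper call that now declares XGBoostError
    static void riskyXgboostCall() throws XGBoostError {
        throw new XGBoostError("simulated native failure");
    }

    public static void main(String[] args) {
        try {
            riskyXgboostCall();
        } catch (XGBoostError ex) {                  // was: catch (XgboostError ex)
            System.err.println("caught renamed exception: " + ex.getMessage());
        }
    }
}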