Merge pull request #733 from damiencarol/javadocfix

[Java] Fix broken javadoc generation
Yuan (Terry) Tang 2016-01-12 09:03:50 -06:00
commit 50af394272
8 changed files with 113 additions and 97 deletions


@ -59,7 +59,7 @@ public final class Booster {
* init Booster from dMatrixs
* @param params parameters
* @param dMatrixs DMatrix array
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public Booster(Iterable<Entry<String, Object>> params, DMatrix[] dMatrixs) throws XGBoostError {
init(dMatrixs);
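
As a usage sketch of the constructor documented above — the import paths, the example class name, the parameter values, and the `train.txt` path are assumptions, not taken from this diff:

```java
import java.util.HashMap;
import java.util.Map;

import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class BoosterInitExample {
  public static void main(String[] args) throws XGBoostError {
    // training data in svmlight format; the path is hypothetical
    DMatrix dtrain = new DMatrix("train.txt");

    // parameters are passed as an Iterable of key-value entries,
    // so a Map's entrySet() can be handed over directly
    Map<String, Object> params = new HashMap<String, Object>();
    params.put("eta", 0.1);
    params.put("max_depth", 3);
    params.put("objective", "binary:logistic");

    // the DMatrix array lists the matrices the booster will train on
    // and later evaluate against
    Booster booster = new Booster(params.entrySet(), new DMatrix[]{dtrain});
  }
}
```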
@ -73,7 +73,7 @@ public final class Booster {
* load model from modelPath
* @param params parameters
* @param modelPath booster modelPath (model generated by booster.saveModel)
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public Booster(Iterable<Entry<String, Object>> params, String modelPath) throws XGBoostError {
init(null);
@ -103,7 +103,7 @@ public final class Booster {
* set parameter
* @param key param name
* @param value param value
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public final void setParam(String key, String value) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGBoosterSetParam(handle, key, value));
@ -112,7 +112,7 @@ public final class Booster {
/**
* set parameters
* @param params parameters key-value map
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void setParams(Iterable<Entry<String, Object>> params) throws XGBoostError {
if(params!=null) {
@ -127,7 +127,7 @@ public final class Booster {
* Update (one iteration)
* @param dtrain training data
* @param iter current iteration number
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void update(DMatrix dtrain, int iter) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGBoosterUpdateOneIter(handle, iter, dtrain.getHandle()));
@ -138,7 +138,7 @@ public final class Booster {
* @param dtrain training data
* @param iter current iteration number
* @param obj customized objective class
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void update(DMatrix dtrain, int iter, IObjective obj) throws XGBoostError {
float[][] predicts = predict(dtrain, true);
@ -151,7 +151,7 @@ public final class Booster {
* @param dtrain training data
* @param grad first order of gradient
* @param hess second order of gradient
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void boost(DMatrix dtrain, float[] grad, float[] hess) throws XGBoostError {
if(grad.length != hess.length) {
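
A sketch of how boost() can drive a fully custom objective, here with hand-computed squared-error gradients; the helper class and method names are made up, and only the signatures shown in this diff are relied on:

```java
import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class CustomBoostStep {
  /** One boosting step with squared-error gradients (illustrative only). */
  static void boostOnce(Booster booster, DMatrix dtrain) throws XGBoostError {
    float[] labels = dtrain.getLabel();
    float[][] preds = booster.predict(dtrain, true); // raw margin predictions
    float[] grad = new float[labels.length];
    float[] hess = new float[labels.length];
    for (int i = 0; i < labels.length; i++) {
      grad[i] = preds[i][0] - labels[i]; // d(loss)/d(pred) for squared error
      hess[i] = 1.0f;                    // second derivative is constant
    }
    booster.boost(dtrain, grad, hess);
  }
}
```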
@ -166,7 +166,7 @@ public final class Booster {
* @param evalNames name for eval dmatrixs, used for check results
* @param iter current eval iteration
* @return eval information
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String evalSet(DMatrix[] evalMatrixs, String[] evalNames, int iter) throws XGBoostError {
long[] handles = dMatrixs2handles(evalMatrixs);
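
Taken together, update() and evalSet() support a plain training loop. A minimal sketch under the same assumptions as above (file paths, parameter values, and round count are illustrative):

```java
import java.util.HashMap;
import java.util.Map;

import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class TrainLoopExample {
  public static void main(String[] args) throws XGBoostError {
    DMatrix dtrain = new DMatrix("train.txt"); // hypothetical paths
    DMatrix dtest  = new DMatrix("test.txt");

    Map<String, Object> params = new HashMap<String, Object>();
    params.put("eta", 0.1);
    params.put("objective", "binary:logistic");

    DMatrix[] evalMats  = new DMatrix[]{dtrain, dtest};
    String[]  evalNames = new String[]{"train", "test"};

    Booster booster = new Booster(params.entrySet(), evalMats);
    for (int iter = 0; iter < 10; iter++) {
      booster.update(dtrain, iter);                        // one boosting round
      System.out.println(booster.evalSet(evalMats, evalNames, iter));
    }
  }
}
```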
@ -177,12 +177,12 @@ public final class Booster {
/**
* evaluate with given customized Evaluation class
* @param evalMatrixs
* @param evalNames
* @param iter
* @param eval
* @param evalMatrixs evaluation matrix
* @param evalNames evaluation names
* @param iter number of iterations
* @param eval custom evaluator
* @return eval information
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String evalSet(DMatrix[] evalMatrixs, String[] evalNames, int iter, IEvaluation eval) throws XGBoostError {
String evalInfo = "";
@ -202,7 +202,7 @@ public final class Booster {
* @param evalNames name for eval dmatrixs, used for check results
* @param iter current eval iteration
* @return eval information
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String evalSet(long[] dHandles, String[] evalNames, int iter) throws XGBoostError {
String[] evalInfo = new String[1];
@ -213,11 +213,11 @@ public final class Booster {
/**
* evaluate with given dmatrix, similar to evalSet
* @param evalMat
* @param evalName
* @param iter
* @param evalMat evaluation matrix
* @param evalName evaluation name
* @param iter number of iterations
* @return eval information
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String eval(DMatrix evalMat, String evalName, int iter) throws XGBoostError {
DMatrix[] evalMats = new DMatrix[] {evalMat};
@ -227,10 +227,10 @@ public final class Booster {
/**
* base function for Predict
* @param data
* @param outPutMargin
* @param treeLimit
* @param predLeaf
* @param data data
* @param outPutMargin output margin
* @param treeLimit limit number of trees
* @param predLeaf whether to predict leaf indices instead of scores
* @return predict results
*/
private synchronized float[][] pred(DMatrix data, boolean outPutMargin, int treeLimit, boolean predLeaf) throws XGBoostError {
@ -259,7 +259,7 @@ public final class Booster {
* Predict with data
* @param data dmatrix storing the input
* @return predict result
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[][] predict(DMatrix data) throws XGBoostError {
return pred(data, false, 0, false);
@ -270,7 +270,7 @@ public final class Booster {
* @param data dmatrix storing the input
* @param outPutMargin Whether to output the raw untransformed margin value.
* @return predict result
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[][] predict(DMatrix data, boolean outPutMargin) throws XGBoostError {
return pred(data, outPutMargin, 0, false);
@ -282,7 +282,7 @@ public final class Booster {
* @param outPutMargin Whether to output the raw untransformed margin value.
* @param treeLimit Limit number of trees in the prediction; defaults to 0 (use all trees).
* @return predict result
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[][] predict(DMatrix data, boolean outPutMargin, int treeLimit) throws XGBoostError {
return pred(data, outPutMargin, treeLimit, false);
@ -297,7 +297,7 @@ public final class Booster {
Note that the leaf index of a tree is unique per tree, so you may find leaf 1
in both tree 1 and tree 0.
* @return predict result
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[][] predict(DMatrix data , int treeLimit, boolean predLeaf) throws XGBoostError {
return pred(data, false, treeLimit, predLeaf);
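
A short sketch contrasting the predict() overloads documented above; the booster and test matrix are assumed to come from a training sketch like the one earlier, and the tree limit is arbitrary:

```java
import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class PredictVariants {
  static void showPredictions(Booster booster, DMatrix dtest) throws XGBoostError {
    float[][] scores  = booster.predict(dtest);            // transformed predictions
    float[][] margins = booster.predict(dtest, true);      // raw, untransformed margins
    float[][] first10 = booster.predict(dtest, false, 10); // use only the first 10 trees
    float[][] leaves  = booster.predict(dtest, 0, true);   // leaf index per tree and row

    System.out.println("rows predicted: " + scores.length
        + ", leaf indices per row: " + leaves[0].length);
  }
}
```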
@ -305,7 +305,7 @@ public final class Booster {
/**
* save model to modelPath
* @param modelPath
* @param modelPath model path
*/
public void saveModel(String modelPath) {
XgboostJNI.XGBoosterSaveModel(handle, modelPath);
@ -319,7 +319,7 @@ public final class Booster {
* get the dump of the model as a string array
* @param withStats Controls whether the split statistics are output.
* @return dumped model information
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String[] getDumpInfo(boolean withStats) throws XGBoostError {
int statsFlag = 0;
@ -336,7 +336,7 @@ public final class Booster {
* @param featureMap featureMap file
* @param withStats Controls whether the split statistics are output.
* @return dumped model information
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String[] getDumpInfo(String featureMap, boolean withStats) throws XGBoostError {
int statsFlag = 0;
@ -353,10 +353,10 @@ public final class Booster {
* @param modelPath file to save dumped model info
* @param withStats bool
Controls whether the split statistics are output.
* @throws FileNotFoundException
* @throws UnsupportedEncodingException
* @throws IOException
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws FileNotFoundException file not found
* @throws UnsupportedEncodingException unsupported encoding
* @throws IOException error with model writing
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void dumpModel(String modelPath, boolean withStats) throws FileNotFoundException, UnsupportedEncodingException, IOException, XGBoostError {
File tf = new File(modelPath);
@ -380,10 +380,10 @@ public final class Booster {
* @param featureMap featureMap file
* @param withStats bool
Controls whether the split statistics are output.
* @throws FileNotFoundException
* @throws UnsupportedEncodingException
* @throws IOException
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws FileNotFoundException file not found
* @throws UnsupportedEncodingException unsupported encoding
* @throws IOException error with model writing
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void dumpModel(String modelPath, String featureMap, boolean withStats) throws FileNotFoundException, UnsupportedEncodingException, IOException, XGBoostError {
File tf = new File(modelPath);
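
A sketch of the persistence and dump calls documented in this file; the output paths and the feature-map file name are hypothetical (featmap.txt following the usual "index name type" format):

```java
import java.io.IOException;

import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.util.XGBoostError;

public class ModelDumpExample {
  static void persist(Booster booster) throws IOException, XGBoostError {
    booster.saveModel("xgb.model");          // binary model for later loading

    // human-readable dump, one string per tree, with split statistics
    String[] trees = booster.getDumpInfo(true);
    System.out.println("dumped " + trees.length + " trees");

    // same dump written to a text file, using feature names from a feature map
    booster.dumpModel("dump.nice.txt", "featmap.txt", true);
  }
}
```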
@ -404,7 +404,7 @@ public final class Booster {
/**
* get importance of each feature
* @return featureMap key: feature index, value: feature importance score
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public Map<String, Integer> getFeatureScore() throws XGBoostError {
String[] modelInfos = getDumpInfo(false);
@ -433,7 +433,7 @@ public final class Booster {
* get importance of each feature
* @param featureMap file to save dumped model info
* @return featureMap key: feature index, value: feature importance score
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public Map<String, Integer> getFeatureScore(String featureMap) throws XGBoostError {
String[] modelInfos = getDumpInfo(featureMap, false);
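
getFeatureScore() parses the model dump into an importance table; a minimal sketch (class and method names are made up):

```java
import java.util.Map;

import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.util.XGBoostError;

public class FeatureImportanceExample {
  static void printImportance(Booster booster) throws XGBoostError {
    // key: feature index, value: importance score parsed from the model dump
    Map<String, Integer> scores = booster.getFeatureScore();
    for (Map.Entry<String, Integer> e : scores.entrySet()) {
      System.out.println(e.getKey() + "\t" + e.getValue());
    }
  }
}
```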


@ -51,8 +51,8 @@ public class DMatrix {
/**
* init DMatrix from file (svmlight format)
* @param dataPath
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @param dataPath path of data file
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public DMatrix(String dataPath) throws XGBoostError {
if(dataPath == null) {
@ -69,7 +69,7 @@ public class DMatrix {
* @param indices Indices (colIndexs for CSR or rowIndexs for CSC)
* @param data non zero values (sequence by row for CSR or by col for CSC)
* @param st sparse matrix type (CSR or CSC)
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public DMatrix(long[] headers, int[] indices, float[] data, SparseType st) throws XGBoostError {
long[] out = new long[1];
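
To make the CSR parameters concrete, a sketch building a small 3x3 sparse matrix; the location of the SparseType enum (assumed here as a nested DMatrix.SparseType) is not shown in this diff:

```java
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class SparseDMatrixExample {
  public static void main(String[] args) throws XGBoostError {
    // 3x3 matrix in CSR form:
    //   row 0: (col 0, 1.0) (col 2, 2.0)
    //   row 1: (col 1, 3.0)
    //   row 2: (col 0, 4.0) (col 2, 5.0)
    long[]  headers = new long[]{0, 2, 3, 5};          // row pointers
    int[]   indices = new int[]{0, 2, 1, 0, 2};        // column index of each value
    float[] data    = new float[]{1f, 2f, 3f, 4f, 5f}; // non-zero values, row by row

    // SparseType location assumed (nested enum DMatrix.SparseType)
    DMatrix dmat = new DMatrix(headers, indices, data, DMatrix.SparseType.CSR);
    System.out.println("rows: " + dmat.rowNum());
  }
}
```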
@ -90,7 +90,7 @@ public class DMatrix {
* @param data data values
* @param nrow number of rows
* @param ncol number of columns
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public DMatrix(float[] data, int nrow, int ncol) throws XGBoostError {
long[] out = new long[1];
@ -110,7 +110,8 @@ public class DMatrix {
/**
* set label of dmatrix
* @param labels
* @param labels labels
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void setLabel(float[] labels) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetFloatInfo(handle, "label", labels));
@ -118,8 +119,8 @@ public class DMatrix {
/**
* set weight of each instance
* @param weights
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @param weights weights
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void setWeight(float[] weights) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetFloatInfo(handle, "weight", weights));
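
A companion sketch for the dense constructor plus the per-instance label and weight setters; all values are illustrative:

```java
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class DenseDMatrixExample {
  public static void main(String[] args) throws XGBoostError {
    // 4 rows x 2 columns, stored row-major in one flat array
    float[] data = new float[]{
        1f, 2f,
        3f, 4f,
        5f, 6f,
        7f, 8f};
    DMatrix dmat = new DMatrix(data, 4, 2);

    dmat.setLabel(new float[]{0f, 1f, 0f, 1f});    // one label per row
    dmat.setWeight(new float[]{1f, 1f, 0.5f, 2f}); // one weight per row
    System.out.println("labels: " + dmat.getLabel().length);
  }
}
```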
@ -128,8 +129,8 @@ public class DMatrix {
/**
* if specified, xgboost will start from this init margin
* can be used to specify initial prediction to boost from
* @param baseMargin
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @param baseMargin base margin
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void setBaseMargin(float[] baseMargin) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetFloatInfo(handle, "base_margin", baseMargin));
@ -138,8 +139,8 @@ public class DMatrix {
/**
* if specified, xgboost will start from this init margin
* can be used to specify initial prediction to boost from
* @param baseMargin
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @param baseMargin base margin
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void setBaseMargin(float[][] baseMargin) throws XGBoostError {
float[] flattenMargin = flatten(baseMargin);
@ -148,8 +149,8 @@ public class DMatrix {
/**
* Set group sizes of DMatrix (used for ranking)
* @param group
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @param group group size as array
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void setGroup(int[] group) throws XGBoostError {
ErrorHandle.checkCall(XgboostJNI.XGDMatrixSetGroup(handle, group));
@ -170,7 +171,7 @@ public class DMatrix {
/**
* get label values
* @return label
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[] getLabel() throws XGBoostError {
return getFloatInfo("label");
@ -179,7 +180,7 @@ public class DMatrix {
/**
* get weight of the DMatrix
* @return weights
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[] getWeight() throws XGBoostError {
return getFloatInfo("weight");
@ -188,7 +189,7 @@ public class DMatrix {
/**
* get base margin of the DMatrix
* @return base margin
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public float[] getBaseMargin() throws XGBoostError {
return getFloatInfo("base_margin");
@ -196,9 +197,9 @@ public class DMatrix {
/**
* Slice the DMatrix and return a new DMatrix that only contains `rowIndex`.
* @param rowIndex
* @param rowIndex row index
* @return sliced new DMatrix
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public DMatrix slice(int[] rowIndex) throws XGBoostError {
long[] out = new long[1];
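
slice() is what the cross-validation code uses to carve folds out of a single DMatrix; a minimal sketch with an arbitrary row selection:

```java
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.XGBoostError;

public class SliceExample {
  static DMatrix firstRows(DMatrix full) throws XGBoostError {
    // keep only rows 0, 1 and 2 of the original matrix
    return full.slice(new int[]{0, 1, 2});
  }
}
```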
@ -211,7 +212,7 @@ public class DMatrix {
/**
* get the row number of DMatrix
* @return number of rows
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public long rowNum() throws XGBoostError {
long[] rowNum = new long[1];
@ -221,12 +222,16 @@ public class DMatrix {
/**
* save DMatrix to filePath
* @param filePath
* @param filePath file path
*/
public void saveBinary(String filePath) {
XgboostJNI.XGDMatrixSaveBinary(handle, filePath, 1);
}
/**
* Get the handle
* @return native handler id
*/
public long getHandle() {
return handle;
}


@ -16,21 +16,26 @@
package org.dmlc.xgboost4j;
/**
* interface for customized evaluation
* interface for customized evaluation
*
* @author hzx
*/
public interface IEvaluation {
/**
* get evaluate metric
* @return evalMetric
*/
public abstract String getMetric();
/**
* evaluate with predicts and data
* @param predicts
* @param dmat
* @return
*/
public abstract float eval(float[][] predicts, DMatrix dmat);
public interface IEvaluation {
/**
* get evaluate metric
*
* @return evalMetric
*/
public abstract String getMetric();
/**
* evaluate with predicts and data
*
* @param predicts
* predictions as array
* @param dmat
* data matrix to evaluate
* @return result of the metric
*/
public abstract float eval(float[][] predicts, DMatrix dmat);
}
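
A sketch of a custom metric implementing this interface — root mean squared error against the DMatrix labels; the class name and metric name are made up, and the checked XGBoostError from getLabel() is wrapped because the interface method declares no throws:

```java
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.IEvaluation;
import org.dmlc.xgboost4j.util.XGBoostError;

public class RmseEval implements IEvaluation {
  @Override
  public String getMetric() {
    return "custom-rmse";
  }

  @Override
  public float eval(float[][] predicts, DMatrix dmat) {
    try {
      float[] labels = dmat.getLabel();
      double sum = 0.0;
      for (int i = 0; i < labels.length; i++) {
        double d = predicts[i][0] - labels[i];
        sum += d * d;
      }
      return (float) Math.sqrt(sum / labels.length);
    } catch (XGBoostError e) {
      throw new RuntimeException(e); // interface does not allow checked exceptions
    }
  }
}
```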


@ -37,7 +37,7 @@ public class CVPack {
* @param dtrain train data
* @param dtest test data
* @param params parameters
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public CVPack(DMatrix dtrain, DMatrix dtest, Iterable<Map.Entry<String, Object>> params) throws XGBoostError {
dmats = new DMatrix[] {dtrain, dtest};
@ -50,7 +50,7 @@ public class CVPack {
/**
* update one iteration
* @param iter iteration num
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void update(int iter) throws XGBoostError {
booster.update(dtrain, iter);
@ -60,7 +60,7 @@ public class CVPack {
* update one iteration
* @param iter iteration num
* @param obj customized objective
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public void update(int iter, IObjective obj) throws XGBoostError {
booster.update(dtrain, iter, obj);
@ -69,8 +69,8 @@ public class CVPack {
/**
* evaluation
* @param iter iteration num
* @return
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @return evaluation
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String eval(int iter) throws XGBoostError {
return booster.evalSet(dmats, names, iter);
@ -80,8 +80,8 @@ public class CVPack {
* evaluation
* @param iter iteration num
* @param eval customized eval
* @return
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @return evaluation
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public String eval(int iter, IEvaluation eval) throws XGBoostError {
return booster.evalSet(dmats, names, iter, eval);


@ -21,8 +21,7 @@ import org.apache.commons.logging.LogFactory;
import org.dmlc.xgboost4j.wrapper.XgboostJNI;
/**
* error handle for Xgboost
* @author hzx
* Error handle for Xgboost.
*/
public class ErrorHandle {
private static final Log logger = LogFactory.getLog(ErrorHandle.class);
@ -38,9 +37,9 @@ public class ErrorHandle {
}
/**
* check the return value of C API
* Check the return value of C API.
* @param ret return value of xgboostJNI C API call
* @throws org.dmlc.xgboost4j.util.XGBoostError
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public static void checkCall(int ret) throws XGBoostError {
if(ret != 0) {


@ -44,8 +44,8 @@ public class Initializer {
/**
* load native library, this method will first try to load library from java.library.path, then try to load library in jar package.
* @param libName
* @throws IOException
* @param libName library name
* @throws IOException exception
*/
private static void smartLoad(String libName) throws IOException {
addNativeDir(nativePath);
@ -63,9 +63,9 @@ public class Initializer {
}
/**
* add libPath to java.library.path, then native library in libPath would be load properly
* @param libPath
* @throws IOException
* Add libPath to java.library.path, then native library in libPath would be loaded properly
* @param libPath library path
* @throws IOException exception
*/
public static void addNativeDir(String libPath) throws IOException {
try {


@ -25,10 +25,11 @@ import java.io.OutputStream;
/**
* Simple library class for working with JNI (Java Native Interface)
*
* @see http://adamheinrich.com/2012/how-to-load-native-jni-library-from-jar
*
* @author Adam Heirnich &lt;adam@adamh.cz&gt;, http://www.adamh.cz
* <p>
* See <a href="http://adamheinrich.com/2012/how-to-load-native-jni-library-from-jar">
* http://adamheinrich.com/2012/how-to-load-native-jni-library-from-jar</a>
* <p>
* Author Adam Heirnich &lt;adam@adamh.cz&gt;, http://www.adamh.cz
*/
public class NativeUtils {
@ -40,14 +41,17 @@ public class NativeUtils {
/**
* Loads library from current JAR archive
*
* The file from JAR is copied into system temporary directory and then loaded. The temporary file is deleted after exiting.
* <p>
* The file from JAR is copied into system temporary directory and then loaded.
* The temporary file is deleted after exiting.
* Method uses String as filename because the pathname is "abstract", not system-dependent.
* <p>
* The restrictions of {@link File#createTempFile(java.lang.String, java.lang.String)} apply to {@code path}.
*
* @param path The filename inside JAR as absolute path (beginning with '/'), e.g. /package/File.ext
* @throws IOException If temporary file creation or read/write operation fails
* @throws IllegalArgumentException If source file (param path) does not exist
* @throws IllegalArgumentException If the path is not absolute or if the filename is shorter than three characters (restriction of {@see File#createTempFile(java.lang.String, java.lang.String)}).
* @throws IllegalArgumentException If the path is not absolute or if the filename is shorter than three characters
*/
public static void loadLibraryFromJar(String path) throws IOException {


@ -45,6 +45,7 @@ public class Trainer {
* @param obj customized objective (set to null if not used)
* @param eval customized evaluation (set to null if not used)
* @return trained booster
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public static Booster train(Iterable<Entry<String, Object>> params, DMatrix dtrain, int round,
Iterable<Entry<String, DMatrix>> watchs, IObjective obj, IEvaluation eval) throws XGBoostError {
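
Trainer.train wraps the manual loop shown earlier behind a single call. A sketch assuming Trainer lives in org.dmlc.xgboost4j.util; paths, parameter values, and round count are illustrative:

```java
import java.util.HashMap;
import java.util.Map;

import org.dmlc.xgboost4j.Booster;
import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.Trainer;      // package assumed
import org.dmlc.xgboost4j.util.XGBoostError;

public class TrainerExample {
  public static void main(String[] args) throws XGBoostError {
    DMatrix dtrain = new DMatrix("train.txt"); // hypothetical paths
    DMatrix dtest  = new DMatrix("test.txt");

    Map<String, Object> params = new HashMap<String, Object>();
    params.put("eta", 0.1);
    params.put("max_depth", 4);
    params.put("objective", "binary:logistic");

    // the watch list is evaluated after every round
    Map<String, DMatrix> watchs = new HashMap<String, DMatrix>();
    watchs.put("train", dtrain);
    watchs.put("test", dtest);

    // no custom objective or evaluation: pass null for both
    Booster booster = Trainer.train(params.entrySet(), dtrain, 10,
        watchs.entrySet(), null, null);
    booster.saveModel("xgb.model");
  }
}
```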
@ -111,6 +112,7 @@ public class Trainer {
* @param obj customized objective (set to null if not used)
* @param eval customized evaluation (set to null if not used)
* @return evaluation history
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public static String[] crossValiation(Iterable<Entry<String, Object>> params, DMatrix data, int round, int nfold, String[] metrics, IObjective obj, IEvaluation eval) throws XGBoostError {
CVPack[] cvPacks = makeNFold(data, nfold, params, metrics);
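
And the cross-validation entry point (the method name crossValiation is spelled as in the source); a minimal sketch with made-up settings, again assuming Trainer sits in org.dmlc.xgboost4j.util:

```java
import java.util.HashMap;
import java.util.Map;

import org.dmlc.xgboost4j.DMatrix;
import org.dmlc.xgboost4j.util.Trainer;      // package assumed
import org.dmlc.xgboost4j.util.XGBoostError;

public class CrossValidationExample {
  public static void main(String[] args) throws XGBoostError {
    DMatrix data = new DMatrix("train.txt");  // hypothetical path

    Map<String, Object> params = new HashMap<String, Object>();
    params.put("eta", 0.1);
    params.put("objective", "binary:logistic");

    // 5-fold CV for 10 rounds, reporting the built-in error metric
    String[] evalHist = Trainer.crossValiation(params.entrySet(), data,
        10, 5, new String[]{"error"}, null, null);
    for (String line : evalHist) {
      System.out.println(line);
    }
  }
}
```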
@ -148,6 +150,7 @@ public class Trainer {
* @param params booster parameters
* @param evalMetrics Evaluation metrics
* @return CV package array
* @throws org.dmlc.xgboost4j.util.XGBoostError native error
*/
public static CVPack[] makeNFold(DMatrix data, int nfold, Iterable<Entry<String, Object>> params, String[] evalMetrics) throws XGBoostError {
List<Integer> samples = genRandPermutationNums(0, (int) data.rowNum());