fix examples

CodingCat 2016-03-11 13:57:03 -05:00
parent aca0096b33
commit ab68a0ccc7
5 changed files with 29 additions and 14 deletions


@@ -34,7 +34,7 @@ object DistTrainWithFlink {
     // number of iterations
     val round = 2
     // train the model
-    val model = XGBoost.train(paramMap, trainData, round)
+    val model = XGBoost.train(trainData, paramMap, round)
     val predTest = model.predict(testData.map{x => x.vector})
     model.saveModelAsHadoopFile("file:///path/to/xgboost.model")
   }
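Note: the Flink example now passes the training data first, matching the reordered train signature further down in this commit. A rough end-to-end sketch of the corrected call order follows; the ml.dmlc.xgboost4j.scala.flink package path, the MLUtils.readLibSVM helper, and the file paths are assumptions for illustration, not part of this commit.

    import ml.dmlc.xgboost4j.scala.flink.XGBoost
    import org.apache.flink.api.scala._
    import org.apache.flink.ml.MLUtils

    object DistTrainWithFlinkSketch {
      def main(args: Array[String]): Unit = {
        val env = ExecutionEnvironment.getExecutionEnvironment
        // LibSVM files loaded as DataSet[LabeledVector] (assumed flink-ml helper)
        val trainData = MLUtils.readLibSVM(env, "/path/to/train.libsvm")
        val testData = MLUtils.readLibSVM(env, "/path/to/test.libsvm")
        val paramMap = Map("eta" -> 0.1, "max_depth" -> 2, "objective" -> "binary:logistic")
        // training data first, then the parameter map, then the number of rounds
        val model = XGBoost.train(trainData, paramMap, round = 2)
        val predictions = model.predict(testData.map { x => x.vector })
        model.saveModelAsHadoopFile("file:///path/to/xgboost.model")
      }
    }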


@@ -16,29 +16,34 @@
 package ml.dmlc.xgboost4j.scala.example.spark

-import ml.dmlc.xgboost4j.scala.spark.XGBoost
+import ml.dmlc.xgboost4j.scala.DMatrix
+import ml.dmlc.xgboost4j.scala.spark.{DataUtils, XGBoost}
 import org.apache.spark.SparkContext
 import org.apache.spark.mllib.util.MLUtils

 object DistTrainWithSpark {
   def main(args: Array[String]): Unit = {
-    if (args.length != 4) {
+    if (args.length != 5) {
       println(
-        "usage: program num_of_rounds num_workers training_path model_path")
+        "usage: program num_of_rounds num_workers training_path test_path model_path")
       sys.exit(1)
     }
     val sc = new SparkContext()
     val inputTrainPath = args(2)
-    val outputModelPath = args(3)
+    val inputTestPath = args(3)
+    val outputModelPath = args(4)
     // number of iterations
     val numRound = args(0).toInt
-    val trainRDD = MLUtils.loadLibSVMFile(sc, inputTrainPath).repartition(args(1).toInt)
+    import DataUtils._
+    val trainRDD = MLUtils.loadLibSVMFile(sc, inputTrainPath)
+    val testSet = MLUtils.loadLibSVMFile(sc, inputTestPath).collect().iterator
     // training parameters
     val paramMap = List(
       "eta" -> 0.1f,
       "max_depth" -> 2,
       "objective" -> "binary:logistic").toMap
-    val xgboostModel = XGBoost.train(trainRDD, paramMap, numRound)
+    val xgboostModel = XGBoost.train(trainRDD, paramMap, numRound, nWorkers = args(1).toInt)
+    xgboostModel.predict(new DMatrix(testSet))
     // save model to HDFS path
     xgboostModel.saveModelAsHadoopFile(outputModelPath)
   }
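For reference, here is a sketch of the revised Spark flow end to end; the HDFS paths, round count, and worker count are placeholders rather than values taken from this commit.

    import ml.dmlc.xgboost4j.scala.DMatrix
    import ml.dmlc.xgboost4j.scala.spark.{DataUtils, XGBoost}
    import org.apache.spark.SparkContext
    import org.apache.spark.mllib.util.MLUtils

    object DistTrainWithSparkSketch {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext()
        import DataUtils._  // implicit conversions from MLlib labeled points to xgboost4j ones
        val trainRDD = MLUtils.loadLibSVMFile(sc, "hdfs:///tmp/train.libsvm")
        val testSet = MLUtils.loadLibSVMFile(sc, "hdfs:///tmp/test.libsvm").collect().iterator
        val paramMap = Map("eta" -> 0.1f, "max_depth" -> 2, "objective" -> "binary:logistic")
        // nWorkers now carries the worker count that was previously an explicit repartition(...)
        val xgboostModel = XGBoost.train(trainRDD, paramMap, 10, nWorkers = 2)
        // the collected test set is wrapped in a local DMatrix and scored on the driver
        val predictions = xgboostModel.predict(new DMatrix(testSet))
        xgboostModel.saveModelAsHadoopFile("hdfs:///tmp/xgboost.model")
        sc.stop()
      }
    }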


@@ -81,13 +81,14 @@ object XGBoost {
   /**
     * Train a xgboost model with link.
     *
-    * @param params The parameters to XGBoost.
     * @param dtrain The training data.
+    * @param params The parameters to XGBoost.
     * @param round Number of rounds to train.
     */
-  def train(params: Map[String, Any],
-      dtrain: DataSet[LabeledVector],
-      round: Int): XGBoostModel = {
+  def train(
+      dtrain: DataSet[LabeledVector],
+      params: Map[String, Any],
+      round: Int): XGBoostModel = {
     val tracker = new RabitTracker(dtrain.getExecutionEnvironment.getParallelism)
     if (tracker.start()) {
       dtrain


@@ -37,6 +37,15 @@ class XGBoostModel (booster: Booster) extends Serializable {
       .create(new Path(modelPath)))
   }

+  /**
+    * predict with the given DMatrix
+    * @param testSet the local test set represented as DMatrix
+    * @return prediction result
+    */
+  def predict(testSet: DMatrix): Array[Array[Float]] = {
+    booster.predict(testSet, true, 0)
+  }
+
   /**
     * Predict given vector dataset.
     *
@@ -44,7 +53,7 @@ class XGBoostModel (booster: Booster) extends Serializable {
     * @return The prediction result.
     */
   def predict(data: DataSet[Vector]) : DataSet[Array[Float]] = {
-    val predictMap: Iterator[Vector] => TraversableOnce[Array[Float]] =
+    val predictMap: Iterator[Vector] => Traversable[Array[Float]] =
       (it: Iterator[Vector]) => {
         val mapper = (x: Vector) => {
           val (index, value) = x.toSeq.unzip
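The new predict(DMatrix) overload scores a locally materialized test set in the current JVM, while the existing predict(DataSet[Vector]) overload keeps running as a distributed Flink transformation. A minimal sketch contrasting the two; the ml.dmlc.xgboost4j.scala.flink package path and the use of Flink ML's Vector type are assumptions, not spelled out in this diff.

    import ml.dmlc.xgboost4j.scala.DMatrix
    import ml.dmlc.xgboost4j.scala.flink.XGBoostModel
    import org.apache.flink.api.scala.DataSet
    import org.apache.flink.ml.math.Vector

    object PredictSketch {
      // new overload: evaluated locally against a DMatrix, one prediction array per row
      def scoreLocally(model: XGBoostModel, testSet: DMatrix): Array[Array[Float]] =
        model.predict(testSet)

      // existing overload: prediction is executed as a map over the distributed DataSet
      def scoreDistributed(model: XGBoostModel, testSet: DataSet[Vector]): DataSet[Array[Float]] =
        model.predict(testSet)
    }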


@@ -35,10 +35,10 @@ class DMatrix private[scala](private[scala] val jDMatrix: JDMatrix) {
     * init DMatrix from Iterator of LabeledPoint
     *
     * @param dataIter An iterator of LabeledPoint
-    * @param cacheInfo Cache path information, used for external memory setting, can be null.
+    * @param cacheInfo Cache path information, used for external memory setting, null by default.
     * @throws XGBoostError native error
     */
-  def this(dataIter: Iterator[LabeledPoint], cacheInfo: String) {
+  def this(dataIter: Iterator[LabeledPoint], cacheInfo: String = null) {
     this(new JDMatrix(dataIter.asJava, cacheInfo))
   }
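Since cacheInfo now defaults to null, the iterator-based constructor can be called with a single argument for the plain in-memory case, while passing a cache path still opts into external-memory mode. A short sketch; the ml.dmlc.xgboost4j.LabeledPoint import path and the cache file location are assumptions for illustration.

    import ml.dmlc.xgboost4j.LabeledPoint
    import ml.dmlc.xgboost4j.scala.DMatrix

    object DMatrixSketch {
      // in-memory DMatrix: cacheInfo is omitted and defaults to null
      def inMemory(points: Iterator[LabeledPoint]): DMatrix =
        new DMatrix(points)

      // external-memory DMatrix: an explicit cache path is supplied
      def externalMemory(points: Iterator[LabeledPoint]): DMatrix =
        new DMatrix(points, "/tmp/dmatrix.cache")
    }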