Change the API name since we support not only HDFS but also the local file system
This commit is contained in:
parent
8e3ce908fe
commit
43d7a85bc9
@ -40,6 +40,6 @@ object DistTrainWithSpark {
|
|||||||
"objective" -> "binary:logistic").toMap
|
"objective" -> "binary:logistic").toMap
|
||||||
val xgboostModel = XGBoost.train(trainRDD, paramMap, numRound)
|
val xgboostModel = XGBoost.train(trainRDD, paramMap, numRound)
|
||||||
// save model to HDFS path
|
// save model to HDFS path
|
||||||
xgboostModel.saveModelToHadoop(outputModelPath)
|
xgboostModel.saveModelAsHadoopFile(outputModelPath)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -128,7 +128,8 @@ object XGBoost extends Serializable {
|
|||||||
* @param modelPath The path of the file representing the model
|
* @param modelPath The path of the file representing the model
|
||||||
* @return The loaded model
|
* @return The loaded model
|
||||||
*/
|
*/
|
||||||
def loadModelFromHadoop(modelPath: String)(implicit sparkContext: SparkContext): XGBoostModel = {
|
def loadModelFromHadoopFile(modelPath: String)(implicit sparkContext: SparkContext):
|
||||||
|
XGBoostModel = {
|
||||||
val path = new Path(modelPath)
|
val path = new Path(modelPath)
|
||||||
val dataInStream = path.getFileSystem(sparkContext.hadoopConfiguration).open(path)
|
val dataInStream = path.getFileSystem(sparkContext.hadoopConfiguration).open(path)
|
||||||
val xgBoostModel = new XGBoostModel(SXGBoost.loadModel(dataInStream))
|
val xgBoostModel = new XGBoostModel(SXGBoost.loadModel(dataInStream))
|
||||||
|
|||||||
@ -49,7 +49,7 @@ class XGBoostModel(booster: Booster)(implicit val sc: SparkContext) extends Seri
|
|||||||
*
|
*
|
||||||
* @param modelPath The model path as in Hadoop path.
|
* @param modelPath The model path as in Hadoop path.
|
||||||
*/
|
*/
|
||||||
def saveModelToHadoop(modelPath: String): Unit = {
|
def saveModelToHadoopFile(modelPath: String): Unit = {
|
||||||
val path = new Path(modelPath)
|
val path = new Path(modelPath)
|
||||||
val outputStream = path.getFileSystem(sc.hadoopConfiguration).create(path)
|
val outputStream = path.getFileSystem(sc.hadoopConfiguration).create(path)
|
||||||
booster.saveModel(outputStream)
|
booster.saveModel(outputStream)
|
||||||
|
|||||||
@ -150,8 +150,8 @@ class XGBoostSuite extends FunSuite with BeforeAndAfter {
|
|||||||
"objective" -> "binary:logistic").toMap
|
"objective" -> "binary:logistic").toMap
|
||||||
val xgBoostModel = XGBoost.train(trainingRDD, paramMap, 5)
|
val xgBoostModel = XGBoost.train(trainingRDD, paramMap, 5)
|
||||||
assert(eval.eval(xgBoostModel.predict(testSetDMatrix), testSetDMatrix) < 0.1)
|
assert(eval.eval(xgBoostModel.predict(testSetDMatrix), testSetDMatrix) < 0.1)
|
||||||
xgBoostModel.saveModelToHadoop(tempFile.toFile.getAbsolutePath)
|
xgBoostModel.saveModelAsHadoopFile(tempFile.toFile.getAbsolutePath)
|
||||||
val loadedXGBooostModel = XGBoost.loadModelFromHadoop(tempFile.toFile.getAbsolutePath)
|
val loadedXGBooostModel = XGBoost.loadModelFromHadoopFile(tempFile.toFile.getAbsolutePath)
|
||||||
val predicts = loadedXGBooostModel.predict(testSetDMatrix)
|
val predicts = loadedXGBooostModel.predict(testSetDMatrix)
|
||||||
assert(eval.eval(predicts, testSetDMatrix) < 0.1)
|
assert(eval.eval(predicts, testSetDMatrix) < 0.1)
|
||||||
}
|
}
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user