more updates for Flink
more fix
parent 43d7a85bc9
commit aca0096b33
@@ -25,6 +25,7 @@ object DistTrainWithFlink {
     // read training data
     val trainData =
       MLUtils.readLibSVM(env, "/path/to/data/agaricus.txt.train")
+    val testData = MLUtils.readLibSVM(env, "/path/to/data/agaricus.txt.test")
     // define parameters
     val paramMap = List(
       "eta" -> 0.1,
@@ -34,7 +35,7 @@ object DistTrainWithFlink {
     val round = 2
     // train the model
     val model = XGBoost.train(paramMap, trainData, round)
-    val predTrain = model.predict(trainData.map{x => x.vector})
-    model.saveModelToHadoop("file:///path/to/xgboost.model")
+    val predTest = model.predict(testData.map{x => x.vector})
+    model.saveModelAsHadoopFile("file:///path/to/xgboost.model")
   }
 }
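Taken together, the two hunks above turn the Flink walkthrough into a train / predict-on-test / save flow using the renamed saveModelAsHadoopFile. For context, a minimal sketch of how the updated example could read end to end; the imports, the ExecutionEnvironment setup, and the extra booster parameters are assumptions added for illustration, not part of this commit.

import org.apache.flink.api.scala._
// assumed package path for the Flink bindings
import ml.dmlc.xgboost4j.scala.flink.{MLUtils, XGBoost}

object DistTrainWithFlink {
  def main(args: Array[String]): Unit = {
    // assumed: obtain a Flink batch execution environment
    val env = ExecutionEnvironment.getExecutionEnvironment
    // read training and test data in LibSVM format
    val trainData = MLUtils.readLibSVM(env, "/path/to/data/agaricus.txt.train")
    val testData = MLUtils.readLibSVM(env, "/path/to/data/agaricus.txt.test")
    // define parameters ("max_depth" and "objective" are assumed values)
    val paramMap = List(
      "eta" -> 0.1,
      "max_depth" -> 2,
      "objective" -> "binary:logistic")
    val round = 2
    // train the model, score the held-out set, and persist the booster
    val model = XGBoost.train(paramMap, trainData, round)
    val predTest = model.predict(testData.map { x => x.vector })
    model.saveModelAsHadoopFile("file:///path/to/xgboost.model")
  }
}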
@@ -70,7 +70,7 @@ object XGBoost {
    * @param modelPath The path that is accessible by hadoop filesystem API.
    * @return The loaded model
    */
-  def loadModelFromHadoop(modelPath: String) : XGBoostModel = {
+  def loadModelFromHadoopFile(modelPath: String) : XGBoostModel = {
     new XGBoostModel(
       XGBoostScala.loadModel(
         FileSystem
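The loader in object XGBoost gets the matching rename. A short sketch of loading a previously saved model and scoring with it, assuming testData is a DataSet read with MLUtils.readLibSVM as in the example above; the variable names are illustrative.

// load a booster previously written with saveModelAsHadoopFile
val loadedModel = XGBoost.loadModelFromHadoopFile("file:///path/to/xgboost.model")
// reuse it for prediction on fresh data
val predictions = loadedModel.predict(testData.map { x => x.vector })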
@@ -31,7 +31,7 @@ class XGBoostModel (booster: Booster) extends Serializable {
    *
    * @param modelPath The model path as in Hadoop path.
    */
-  def saveModelToHadoop(modelPath: String): Unit = {
+  def saveModelAsHadoopFile(modelPath: String): Unit = {
     booster.saveModel(FileSystem
       .get(new Configuration)
       .create(new Path(modelPath)))
@@ -49,7 +49,7 @@ class XGBoostModel(booster: Booster)(implicit val sc: SparkContext) extends Serializable {
    *
    * @param modelPath The model path as in Hadoop path.
    */
-  def saveModelToHadoopFile(modelPath: String): Unit = {
+  def saveModelAsHadoopFile(modelPath: String): Unit = {
     val path = new Path(modelPath)
     val outputStream = path.getFileSystem(sc.hadoopConfiguration).create(path)
     booster.saveModel(outputStream)
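The Spark flavour of XGBoostModel now exposes the same saveModelAsHadoopFile name; here the SparkContext captured by the model's constructor supplies the Hadoop configuration, so the path scheme alone selects the target filesystem. A hedged sketch of a helper built on top of it; persistModel is a hypothetical name introduced only for illustration.

// Persist an already trained Spark-side model. The model carries its
// SparkContext implicitly (see the constructor in the hunk above), so
// "hdfs://", "file://", or any other Hadoop-supported scheme works here.
def persistModel(model: XGBoostModel, path: String): Unit =
  model.saveModelAsHadoopFile(path)

// usage (illustrative):
// persistModel(trainedModel, "hdfs:///path/to/xgboost.model")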