fix stdin input

tqchen 2015-03-08 12:22:11 -07:00
parent 3258bcf531
commit 2fbda812bc
5 changed files with 16 additions and 14 deletions


@@ -78,7 +78,7 @@ class HDFSStream : public utils::ISeekStream {
   }
   inline void Close(void) {
     if (fp_ != NULL) {
-      if (hdfsCloseFile(fs_, fp_) == 0) {
+      if (hdfsCloseFile(fs_, fp_) == -1) {
        int errsv = errno;
        utils::Error("HDFSStream.Close Error:%s", strerror(errsv));
      }
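
Note on the change: libhdfs's hdfsCloseFile returns 0 on success and -1 on failure, so the old test (== 0) reported an error on every successful close and stayed silent on real failures. A minimal standalone sketch of the corrected pattern (illustrative helper, not rabit's HDFSStream):

    // Sketch only: assumes the libhdfs header (hdfs.h) is available;
    // CloseChecked is a hypothetical helper, not part of rabit.
    #include <cerrno>
    #include <cstdio>
    #include <cstring>
    #include <hdfs.h>

    void CloseChecked(hdfsFS fs, hdfsFile fp) {
      if (fp != NULL) {
        // hdfsCloseFile returns 0 on success, -1 on error
        if (hdfsCloseFile(fs, fp) == -1) {
          int errsv = errno;
          std::fprintf(stderr, "HDFS close error: %s\n", std::strerror(errsv));
        }
      }
    }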


@@ -140,8 +140,10 @@ class LineSplitBase : public InputSplit {
 class SingleFileSplit : public InputSplit {
  public:
   explicit SingleFileSplit(const char *fname) {
     if (!strcmp(fname, "stdin")) {
-      use_stdin_ = true;
+#ifndef RABIT_STRICT_CXX98_
+      use_stdin_ = true; fp_ = stdin;
+#endif
     }
     if (!use_stdin_) {
       fp_ = utils::FopenCheck(fname, "r");
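
This hunk is the actual stdin fix: when the input name is the literal "stdin", fp_ now points at the process's standard input instead of being left unset. A self-contained sketch of the same open logic, using a hypothetical OpenInput helper rather than rabit's class:

    // Sketch under assumptions: OpenInput is illustrative, not rabit's API.
    #include <cstdio>
    #include <cstring>

    FILE *OpenInput(const char *fname) {
      if (!strcmp(fname, "stdin")) {
        return stdin;                  // special name: read from standard input
      }
      FILE *fp = std::fopen(fname, "r");
      if (fp == NULL) {
        std::fprintf(stderr, "cannot open %s\n", fname);
      }
      return fp;                       // caller must not fclose(stdin)
    }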


@@ -6,15 +6,15 @@ then
 fi
 # put the local training file to HDFS
-hadoop fs -rm -r -f $2/data
+#hadoop fs -rm -r -f $2/data
 hadoop fs -rm -r -f $2/mushroom.linear.model
-hadoop fs -mkdir $2/data
-hadoop fs -put ../data/agaricus.txt.train $2/data
+#hadoop fs -mkdir $2/data
+#hadoop fs -put ../data/agaricus.txt.train $2/data
 # submit to hadoop
-../../tracker/rabit_hadoop.py --host_ip ip -n $1 -i $2/data/agaricus.txt.train -o $2/mushroom.linear.model linear.rabit stdin model_out=stdout "${*:3}"
+../../tracker/rabit_hadoop_streaming.py -n $1 --vcores 1 -i $2/data/agaricus.txt.train -o $2/mushroom.linear.model linear.rabit stdin model_out=stdout "${*:3}"
 # get the final model file
-hadoop fs -get $2/mushroom.linear.model/part-00000 ./linear.model
-./linear.rabit ../data/agaricus.txt.test task=pred model_in=linear.model
+#hadoop fs -get $2/mushroom.linear.model/part-00000 ./linear.model
+#./linear.rabit ../data/agaricus.txt.test task=pred model_in=linear.model
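
The script now assumes the training data is already staged on HDFS and submits the job through rabit_hadoop_streaming.py; under Hadoop Streaming each task receives its input split on standard input, which is why linear.rabit is launched with stdin as its data argument. A hedged sketch of a worker loop consuming such piped input (illustrative only, not the linear.rabit source):

    // Sketch: reads one training record per line from standard input,
    // the way a Hadoop Streaming task sees its input split.
    #include <cstdio>
    #include <iostream>
    #include <string>

    int main() {
      std::string line;
      std::size_t nrec = 0;
      while (std::getline(std::cin, line)) {
        ++nrec;                        // parse and accumulate the record here
      }
      std::fprintf(stderr, "consumed %zu records from stdin\n", nrec);
      return 0;
    }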


@ -1,12 +1,12 @@
@@ -1,12 +1,12 @@
 # this is the common build script for rabit programs
 # you do not have to use it
-export LDFLAGS= -pthread -lm -L../../lib -lrt
+export LDFLAGS= -L../../lib -pthread -lm -lrt
 export CFLAGS = -Wall -msse2 -Wno-unknown-pragmas -fPIC -I../../include
 # setup opencv
 ifeq ($(USE_HDFS),1)
-CFLAGS+= -DRABIT_USE_HDFS=1 -I$(LIBHDFS_INCLUDE) -I$(JAVA_HOME)/include
-LDFLAGS+= -L$(HDFS_HOME)/lib/native -lhdfs
+CFLAGS+= -DRABIT_USE_HDFS=1 -I$(HADOOP_HDFS_HOME)/include -I$(JAVA_HOME)/include
+LDFLAGS+= -L$(HADOOP_HDFS_HOME)/lib/native -L$(LIBJVM) -lhdfs -ljvm
 else
 CFLAGS+= -DRABIT_USE_HDFS=0
 endif
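
The new link line adds -ljvm and the LIBJVM search path because libhdfs drives HDFS through an embedded JVM, so a binary built with USE_HDFS=1 needs both libraries at link time and libjvm.so reachable at run time. A hedged smoke-test sketch for that link setup ("default" picks the namenode from the Hadoop configuration):

    // Sketch only: verifies the -lhdfs/-ljvm link setup by opening and
    // closing a connection to the configured default filesystem.
    #include <cstdio>
    #include <hdfs.h>

    int main() {
      hdfsFS fs = hdfsConnect("default", 0);  // default namenode from config
      if (fs == NULL) {
        std::fprintf(stderr, "hdfsConnect failed\n");
        return 1;
      }
      hdfsDisconnect(fs);
      return 0;
    }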


@@ -17,5 +17,5 @@ export MPICXX = mpicxx
 # whether use HDFS support during compile
 USE_HDFS = 1
-# home of hadoop
-HDFS_HOME = $(HADOOP_HDFS_HOME)
+# path to libjvm.so
+LIBJVM=$(JAVA_HOME)/jre/lib/amd64/server