change nslave to nworker

tqchen 2014-12-29 18:42:24 -08:00
parent 39504825d8
commit bdfa1a0220
2 changed files with 17 additions and 12 deletions

rabit_hadoop.py

@@ -29,20 +29,21 @@ if hadoop_binary == None or hadoop_streaming_jar == None:
     print '\tTo enable auto-detection, you can set enviroment variable HADOOP_HOME or modify rabit_hadoop.py line 14'
 parser = argparse.ArgumentParser(description='Rabit script to submit rabit jobs using Hadoop Streaming')
-parser.add_argument('-n', '--nslaves', required=True, type=int,
-                    help = 'number of slaves proccess to be launched')
-parser.add_argument('-v', '--verbose', default=0, choices=[0, 1], type=int,
-                    help = 'print more messages into the console')
-parser.add_argument('-ac', '--auto_file_cache', default=1, choices=[0, 1], type=int,
-                    help = 'whether automatically cache the files in the command to hadoop localfile, this is on by defaultz')
+parser.add_argument('-n', '--nworker', required=True, type=int,
+                    help = 'number of worker proccess to be launched')
 parser.add_argument('-i', '--input', required=True,
                     help = 'input path in HDFS')
 parser.add_argument('-o', '--output', required=True,
                     help = 'output path in HDFS')
+parser.add_argument('-v', '--verbose', default=0, choices=[0, 1], type=int,
+                    help = 'print more messages into the console')
+parser.add_argument('-ac', '--auto_file_cache', default=1, choices=[0, 1], type=int,
+                    help = 'whether automatically cache the files in the command to hadoop localfile, this is on by default')
 parser.add_argument('-f', '--files', nargs = '*',
                     help = 'the cached file list in mapreduce,'\
                     ' the submission script will automatically cache all the files which appears in command.'\
                     ' you may need this option to cache additional files, or manually cache files when auto_file_cache is off')
+parser.add_argument('--jobname', help = 'customize jobname in tracker')
 if hadoop_binary == None:
     parser.add_argument('-hb', '--hadoop_binary', required = True,
                         help="path-to-hadoop binary folder")
@@ -60,8 +61,12 @@ parser.add_argument('command', nargs='+',
                     help = 'command for rabit program')
 args = parser.parse_args()
-def hadoop_streaming(nslaves, slave_args):
-    cmd = '%s jar %s -D mapred.map.tasks=%d' % (args.hadoop_binary, args.hadoop_streaming_jar, nslaves)
+if args.jobname is None:
+    args.jobname = ('Rabit(nworker=%d):' % args.nworker) + args.command[0].split('/')[-1];
+def hadoop_streaming(nworker, slave_args):
+    cmd = '%s jar %s -D mapred.map.tasks=%d' % (args.hadoop_binary, args.hadoop_streaming_jar, nworker)
+    cmd += ' -D mapred.job.name=%s' % (args.jobname)
 cmd += ' -input %s -output %s' % (args.input, args.output)
 cmd += ' -mapper \"%s\" -reducer \"/bin/cat\" ' % (' '.join(args.command + slave_args))
 fset = set()
@@ -77,4 +82,4 @@ def hadoop_streaming(nslaves, slave_args):
     print cmd
     subprocess.check_call(cmd, shell = True)
-tracker.submit(args.nslaves, [], fun_submit = hadoop_streaming, verbose = args.verbose)
+tracker.submit(args.nworker, [], fun_submit = hadoop_streaming, verbose = args.verbose)
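
Besides the rename, rabit_hadoop.py now derives a default job name when --jobname is not given. A minimal standalone sketch of that default rule (the function name and sample values below are illustrative, not part of the commit):

    # Sketch of the default job-name rule added above; 'default_jobname'
    # and the sample arguments are hypothetical.
    def default_jobname(nworker, command):
        # use the executable's basename, e.g. './bin/my_program' -> 'my_program'
        return ('Rabit(nworker=%d):' % nworker) + command[0].split('/')[-1]

    print default_jobname(4, ['./bin/my_program', 'train.conf'])
    # prints: Rabit(nworker=4):my_program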

rabit_mpi.py

@@ -10,8 +10,8 @@ import subprocess
 import rabit_tracker as tracker
 parser = argparse.ArgumentParser(description='Rabit script to submit rabit job using MPI')
-parser.add_argument('-n', '--nslaves', required=True, type=int,
-                    help = 'number of slaves proccess to be launched')
+parser.add_argument('-n', '--nworker', required=True, type=int,
+                    help = 'number of worker proccess to be launched')
 parser.add_argument('-v', '--verbose', default=0, choices=[0, 1], type=int,
                     help = 'print more messages into the console')
 parser.add_argument('-H', '--hostfile', type=str,
@@ -42,4 +42,4 @@ def mpi_submit(nslave, slave_args):
     subprocess.check_call(cmd, shell = True)
 # call submit, with nslave, the commands to run each job and submit function
-tracker.submit(args.nslaves, [], fun_submit = mpi_submit, verbose = args.verbose)
+tracker.submit(args.nworker, [], fun_submit = mpi_submit, verbose = args.verbose)
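
Both scripts end by handing their launcher to tracker.submit. The tracker internals are not part of this diff; a hedged sketch of the callback contract, assuming the tracker invokes fun_submit once with the worker count plus the extra per-worker arguments it wants appended (names below are hypothetical):

    import subprocess

    # Mirrors the shape of hadoop_streaming / mpi_submit above; 'demo_submit'
    # and the sample tracker arguments are made up for illustration.
    def demo_submit(nworker, slave_args):
        # slave_args would carry flags telling each worker how to reach the tracker
        cmd = 'echo would launch %d workers with: %s' % (nworker, ' '.join(slave_args))
        subprocess.check_call(cmd, shell = True)

    demo_submit(4, ['tracker_uri=localhost', 'tracker_port=9091'])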