From d9c22e54deb125dc5d38dd0b64906d72491f0ddc Mon Sep 17 00:00:00 2001
From: nachocano
Date: Fri, 5 Dec 2014 13:28:42 -0800
Subject: [PATCH] closer, but still does not work... stays in map 100%. I
 think an exception is being thrown

---
 submit_job_hadoop.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/submit_job_hadoop.py b/submit_job_hadoop.py
index fbb83a79f..d7ef17ff5 100755
--- a/submit_job_hadoop.py
+++ b/submit_job_hadoop.py
@@ -22,7 +22,7 @@ parser.add_argument('-itr', '--iterations', required=True, type=int)
 args = parser.parse_args()
 
 
 def hadoop_streaming(nslaves, slave_args):
-    cmd = '%s jar %s -input %s -output %s -mapper \"%s %s\" -reducer /bin/cat stdin %d %d stdout' % (args.hadoop_binary, args.hadoop_streaming_jar, args.input, args.output, args.mapper, ' '.join(slave_args), args.nclusters, args.iterations)
+    cmd = '%s jar %s -input %s -output %s -mapper \"%s stdin %d %d stdout %s\" -reducer /bin/cat -file %s' % (args.hadoop_binary, args.hadoop_streaming_jar, args.input, args.output, args.mapper, args.nclusters, args.iterations, ' '.join(slave_args), args.mapper)
     print cmd
     subprocess.check_call(cmd, shell = True)
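
A rough sketch of what the rewritten command expands to, using hypothetical values for the parsed arguments (the hadoop path, jar name, mapper script, cluster count, iteration count, and slave hosts below are made up for illustration and are not taken from the patch; the snippet follows the script's Python 2 style):

    # Hypothetical stand-ins for args.* so the format string can be shown end to end.
    hadoop_binary = '/usr/bin/hadoop'
    streaming_jar = 'hadoop-streaming.jar'
    input_dir, output_dir = 'in', 'out'
    mapper, nclusters, iterations = './kmeans_mapper.py', 4, 10
    slave_args = ['host1', 'host2']

    # Same format string as the added line above: the mapper's own arguments
    # (stdin, nclusters, iterations, stdout, slave args) now sit inside the quoted
    # -mapper string, and -file ships the mapper script to the task nodes.
    cmd = '%s jar %s -input %s -output %s -mapper \"%s stdin %d %d stdout %s\" -reducer /bin/cat -file %s' % (
        hadoop_binary, streaming_jar, input_dir, output_dir,
        mapper, nclusters, iterations, ' '.join(slave_args), mapper)
    print cmd
    # -> /usr/bin/hadoop jar hadoop-streaming.jar -input in -output out
    #    -mapper "./kmeans_mapper.py stdin 4 10 stdout host1 host2" -reducer /bin/cat -file ./kmeans_mapper.py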