Merge commit '68c2aaa7fe8c1f4688cef2ace67642e85fd1c9d2'
Commit 135d461c40
@@ -15,6 +15,7 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -36,6 +37,7 @@ import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

/**
@@ -66,6 +68,10 @@ public class ApplicationMaster {

    // username
    private String userName = "";
    // user credentials
    private Credentials credentials = null;
    // security tokens
    private ByteBuffer securityTokens = null;
    // application tracker hostname
    private String appHostName = "";
    // tracker URL to do
@@ -99,8 +105,12 @@ public class ApplicationMaster {

    private ApplicationMaster() throws IOException {
        dfs = FileSystem.get(conf);
        userName = UserGroupInformation.getCurrentUser().getShortUserName();
        credentials = UserGroupInformation.getCurrentUser().getCredentials();
        DataOutputBuffer buffer = new DataOutputBuffer();
        this.credentials.writeTokenStorageToStream(buffer);
        this.securityTokens = ByteBuffer.wrap(buffer.getData());
    }

    /**
     * get integer argument from environment variable
     *
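For reference, a minimal self-contained sketch (not part of this patch; the class and method names are illustrative) of the token-serialization pattern the constructor above uses, turning the current user's Hadoop Credentials into the ByteBuffer form YARN containers accept:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

// Illustrative helper, not from the patch: serialize the current user's
// delegation tokens so they can be attached to a ContainerLaunchContext.
public final class TokenBufferSketch {
    public static ByteBuffer currentUserTokens() throws IOException {
        Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
        DataOutputBuffer buffer = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(buffer);
        // wrap only the bytes actually written; getData() may be larger than getLength()
        return ByteBuffer.wrap(buffer.getData(), 0, buffer.getLength());
    }
}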
@@ -132,7 +142,7 @@ public class ApplicationMaster {
     * @param args
     */
    private void initArgs(String args[]) throws IOException {
        LOG.info("Invoke initArgs");
        LOG.info("Start AM as user=" + this.userName);
        // get user name
        userName = UserGroupInformation.getCurrentUser().getShortUserName();
        // cached maps
@@ -183,7 +193,7 @@ public class ApplicationMaster {
        RegisterApplicationMasterResponse response = this.rmClient
                .registerApplicationMaster(this.appHostName,
                        this.appTrackerPort, this.appTrackerUrl);

        boolean success = false;
        String diagnostics = "";
        try {
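For reference, a minimal sketch (not part of this patch; the handler, host, port and tracking URL are placeholders) of registering an ApplicationMaster through AMRMClientAsync, the client used in the hunk above:

import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustrative helper, not from the patch: start an async RM client and register.
public final class AmRegistrationSketch {
    public static void register(AMRMClientAsync.CallbackHandler handler) throws Exception {
        AMRMClientAsync<?> rmClient = AMRMClientAsync.createAMRMClientAsync(1000, handler);
        rmClient.init(new YarnConfiguration());
        rmClient.start();
        // host name, RPC port and tracking URL are placeholders here
        RegisterApplicationMasterResponse response =
                rmClient.registerApplicationMaster("", -1, "");
        System.out.println("max capability: " + response.getMaximumResourceCapability());
    }
}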
@@ -274,13 +284,6 @@ public class ApplicationMaster {
            task.containerRequest = null;
            ContainerLaunchContext ctx = Records
                    .newRecord(ContainerLaunchContext.class);
            String hadoop = "hadoop";
            if (System.getenv("HADOOP_HOME") != null) {
                hadoop = "${HADOOP_HOME}/bin/hadoop";
            } else if (System.getenv("HADOOP_PREFIX") != null) {
                hadoop = "${HADOOP_PREFIX}/bin/hadoop";
            }

            String cmd =
                    // use this to setup CLASSPATH correctly for libhdfs
                    this.command + " 1>"
@@ -288,6 +291,7 @@ public class ApplicationMaster {
                    + " 2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR
                    + "/stderr";
            ctx.setCommands(Collections.singletonList(cmd));
            ctx.setTokens(this.securityTokens);
            LOG.info(workerResources);
            ctx.setLocalResources(this.workerResources);
            // setup environment variables
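For reference, a minimal sketch (not part of this patch; the command string and resource map are placeholders) of assembling a ContainerLaunchContext with the same setCommands/setTokens/setLocalResources calls the hunk above makes:

import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Map;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.util.Records;

// Illustrative helper, not from the patch: build a launch context that runs a
// command, carries the delegation tokens and localizes the given resources.
public final class LaunchContextSketch {
    public static ContainerLaunchContext build(ByteBuffer tokens,
                                                Map<String, LocalResource> resources) {
        ContainerLaunchContext ctx = Records.newRecord(ContainerLaunchContext.class);
        String cmd = "./launcher.sh"
                + " 1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout"
                + " 2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr";
        ctx.setCommands(Collections.singletonList(cmd));
        ctx.setTokens(tokens);            // per-container copy of the security tokens
        ctx.setLocalResources(resources); // files localized onto the node before launch
        return ctx;
    }
}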
@@ -1,5 +1,6 @@
package org.apache.hadoop.yarn.rabit;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.Map;

@@ -9,7 +10,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
@@ -47,6 +50,8 @@ public class Client {
    private String tempdir = "/tmp";
    // user name
    private String userName = "";
    // user credentials
    private Credentials credentials = null;
    // job name
    private String jobName = "";
    // queue
@@ -60,10 +65,22 @@ public class Client {
        conf.addResource(new Path(System.getenv("HADOOP_CONF_DIR") +"/hdfs-site.xml"));
        dfs = FileSystem.get(conf);
        userName = UserGroupInformation.getCurrentUser().getShortUserName();
        credentials = UserGroupInformation.getCurrentUser().getCredentials();
    }

    /**
     * setup security tokens for the current user
     * @return the ByteBuffer containing the security tokens
     * @throws IOException
     */
    private ByteBuffer setupTokens() throws IOException {
        DataOutputBuffer buffer = new DataOutputBuffer();
        this.credentials.writeTokenStorageToStream(buffer);
        return ByteBuffer.wrap(buffer.getData());
    }

    /**
     * setup all the cached files
     *
     * @param fmaps
     *            the file maps
@@ -194,9 +211,10 @@ public class Client {
                .newRecord(ContainerLaunchContext.class);
        ApplicationSubmissionContext appContext = app
                .getApplicationSubmissionContext();

        // Submit application
        ApplicationId appId = appContext.getApplicationId();
        // setup security token
        amContainer.setTokens(this.setupTokens());
        // setup cache-files and environment variables
        amContainer.setLocalResources(this.setupCacheFiles(appId));
        amContainer.setEnvironment(this.getEnvironment());
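For reference, a minimal sketch (not part of this patch; the application name, command and resource sizes are placeholders) of the client-side submission flow this final hunk belongs to, from YarnClient.createApplication through submitApplication:

import java.util.Collections;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.client.api.YarnClientApplication;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.Records;

// Illustrative helper, not from the patch: create an application, fill in the
// AM container spec and submit it to the ResourceManager.
public final class SubmitSketch {
    public static ApplicationId submit() throws Exception {
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new YarnConfiguration());
        yarnClient.start();

        YarnClientApplication app = yarnClient.createApplication();
        ApplicationSubmissionContext appContext = app.getApplicationSubmissionContext();
        ApplicationId appId = appContext.getApplicationId();

        ContainerLaunchContext amContainer = Records.newRecord(ContainerLaunchContext.class);
        amContainer.setCommands(Collections.singletonList("./run_am.sh"));
        // in the patch, the AM container additionally gets setTokens(setupTokens()),
        // setLocalResources(setupCacheFiles(appId)) and setEnvironment(getEnvironment())

        Resource capability = Records.newRecord(Resource.class);
        capability.setMemory(1024);
        capability.setVirtualCores(1);

        appContext.setAMContainerSpec(amContainer);
        appContext.setResource(capability);
        appContext.setApplicationName("example-app");

        yarnClient.submitApplication(appContext);
        return appId;
    }
}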