Java Code Examples for org.apache.hadoop.mapreduce.MRJobConfig#JOB_CONF_FILE

The following examples show how to use org.apache.hadoop.mapreduce.MRJobConfig#JOB_CONF_FILE. The originating project, source file, and license are noted above each example.
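As a quick orientation before the examples, here is a minimal sketch (not taken from any of the projects below; the staging-directory path is a made-up placeholder). MRJobConfig.JOB_CONF_FILE holds the file name of the serialized job configuration (job.xml in current Hadoop releases), and the examples typically append it to a job's submit or staging directory to locate that file.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRJobConfig;

public class JobConfFileSketch {
  public static void main(String[] args) {
    // Hypothetical submit directory; real code obtains this from the staging area.
    Path submitDir = new Path("/tmp/staging/job_1234567890123_0001");
    // Append the job configuration file name to the submit directory.
    Path jobConfFile = new Path(submitDir, MRJobConfig.JOB_CONF_FILE);
    System.out.println(jobConfFile); // /tmp/staging/job_1234567890123_0001/job.xml
  }
}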
Example 1
Source File: JobImpl.java    From hadoop with Apache License 2.0
protected void setup(JobImpl job) throws IOException {

  String oldJobIDString = job.oldJobId.toString();
  String user =
      UserGroupInformation.getCurrentUser().getShortUserName();
  Path path = MRApps.getStagingAreaDir(job.conf, user);
  if (LOG.isDebugEnabled()) {
    LOG.debug("startJobs: parent=" + path + " child=" + oldJobIDString);
  }

  job.remoteJobSubmitDir =
      FileSystem.get(job.conf).makeQualified(
          new Path(path, oldJobIDString));
  job.remoteJobConfFile =
      new Path(job.remoteJobSubmitDir, MRJobConfig.JOB_CONF_FILE);

  // Prepare the TaskAttemptListener server for authentication of Containers
  // TaskAttemptListener gets the information via jobTokenSecretManager.
  JobTokenIdentifier identifier =
      new JobTokenIdentifier(new Text(oldJobIDString));
  job.jobToken =
      new Token<JobTokenIdentifier>(identifier, job.jobTokenSecretManager);
  job.jobToken.setService(identifier.getJobId());
  // Add it to the jobTokenSecretManager so that TaskAttemptListener server
  // can authenticate containers(tasks)
  job.jobTokenSecretManager.addTokenForJob(oldJobIDString, job.jobToken);
  LOG.info("Adding job token for " + oldJobIDString
      + " to jobTokenSecretManager");

  // If the job client did not setup the shuffle secret then reuse
  // the job token secret for the shuffle.
  if (TokenCache.getShuffleSecretKey(job.jobCredentials) == null) {
    LOG.warn("Shuffle secret key missing from job credentials."
        + " Using job token secret as shuffle secret.");
    TokenCache.setShuffleSecretKey(job.jobToken.getPassword(),
        job.jobCredentials);
  }
}
 
Example 2
Source File: TestAMWebServicesJobConf.java    From hadoop with Apache License 2.0
@Override
protected void configureServlets() {

  Path confPath = new Path(testConfDir.toString(),
      MRJobConfig.JOB_CONF_FILE);
  Configuration config = new Configuration();

  FileSystem localFs;
  try {
    localFs = FileSystem.getLocal(config);
    confPath = localFs.makeQualified(confPath);

    OutputStream out = localFs.create(confPath);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    if (!localFs.exists(confPath)) {
      fail("error creating config file: " + confPath);
    }

  } catch (IOException e) {
    fail("error creating config file: " + e.getMessage());
  }

  appContext = new MockAppContext(0, 2, 1, confPath);

  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);

  serve("/*").with(GuiceContainer.class);
}
 
Example 3
Source File: JobImpl.java    From big-c with Apache License 2.0
protected void setup(JobImpl job) throws IOException {

  String oldJobIDString = job.oldJobId.toString();
  String user =
      UserGroupInformation.getCurrentUser().getShortUserName();
  Path path = MRApps.getStagingAreaDir(job.conf, user);
  if (LOG.isDebugEnabled()) {
    LOG.debug("startJobs: parent=" + path + " child=" + oldJobIDString);
  }

  job.remoteJobSubmitDir =
      FileSystem.get(job.conf).makeQualified(
          new Path(path, oldJobIDString));
  job.remoteJobConfFile =
      new Path(job.remoteJobSubmitDir, MRJobConfig.JOB_CONF_FILE);

  // Prepare the TaskAttemptListener server for authentication of Containers
  // TaskAttemptListener gets the information via jobTokenSecretManager.
  JobTokenIdentifier identifier =
      new JobTokenIdentifier(new Text(oldJobIDString));
  job.jobToken =
      new Token<JobTokenIdentifier>(identifier, job.jobTokenSecretManager);
  job.jobToken.setService(identifier.getJobId());
  // Add it to the jobTokenSecretManager so that TaskAttemptListener server
  // can authenticate containers(tasks)
  job.jobTokenSecretManager.addTokenForJob(oldJobIDString, job.jobToken);
  LOG.info("Adding job token for " + oldJobIDString
      + " to jobTokenSecretManager");

  // If the job client did not setup the shuffle secret then reuse
  // the job token secret for the shuffle.
  if (TokenCache.getShuffleSecretKey(job.jobCredentials) == null) {
    LOG.warn("Shuffle secret key missing from job credentials."
        + " Using job token secret as shuffle secret.");
    TokenCache.setShuffleSecretKey(job.jobToken.getPassword(),
        job.jobCredentials);
  }
}
 
Example 4
Source File: TestAMWebServicesJobConf.java    From big-c with Apache License 2.0
@Override
protected void configureServlets() {

  Path confPath = new Path(testConfDir.toString(),
      MRJobConfig.JOB_CONF_FILE);
  Configuration config = new Configuration();

  FileSystem localFs;
  try {
    localFs = FileSystem.getLocal(config);
    confPath = localFs.makeQualified(confPath);

    OutputStream out = localFs.create(confPath);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    if (!localFs.exists(confPath)) {
      fail("error creating config file: " + confPath);
    }

  } catch (IOException e) {
    fail("error creating config file: " + e.getMessage());
  }

  appContext = new MockAppContext(0, 2, 1, confPath);

  bind(JAXBContextResolver.class);
  bind(AMWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(AppContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);

  serve("/*").with(GuiceContainer.class);
}
 
Example 5
Source File: YarnChild.java    From hadoop with Apache License 2.0
private static void configureTask(JobConf job, Task task,
    Credentials credentials, Token<JobTokenIdentifier> jt) throws IOException {
  job.setCredentials(credentials);
  
  ApplicationAttemptId appAttemptId =
      ConverterUtils.toContainerId(
          System.getenv(Environment.CONTAINER_ID.name()))
          .getApplicationAttemptId();
  LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId);
  // Set it in conf so that it can be used by the OutputCommitter.
  job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID,
      appAttemptId.getAttemptId());

  // set tcp nodelay
  job.setBoolean("ipc.client.tcpnodelay", true);
  job.setClass(MRConfig.TASK_LOCAL_OUTPUT_CLASS,
      YarnOutputFiles.class, MapOutputFile.class);
  // set the jobToken and shuffle secrets into task
  task.setJobTokenSecret(
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  byte[] shuffleSecret = TokenCache.getShuffleSecretKey(credentials);
  if (shuffleSecret == null) {
    LOG.warn("Shuffle secret missing from task credentials."
        + " Using job token secret as shuffle secret.");
    shuffleSecret = jt.getPassword();
  }
  task.setShuffleSecret(
      JobTokenSecretManager.createSecretKey(shuffleSecret));

  // setup the child's MRConfig.LOCAL_DIR.
  configureLocalDirs(task, job);

  // setup the child's attempt directories
  // Do the task-type specific localization
  task.localizeConfiguration(job);

  // Set up the DistributedCache related configs
  MRApps.setupDistributedCacheLocal(job);

  // Overwrite the localized task jobconf which is linked to in the current
  // work-dir.
  Path localTaskFile = new Path(MRJobConfig.JOB_CONF_FILE);
  writeLocalJobFile(localTaskFile, job);
  task.setJobFile(localTaskFile.toString());
  task.setConf(job);
}
 
Example 6
Source File: MRApps.java    From hadoop with Apache License 2.0
public static String getJobFile(Configuration conf, String user, 
    org.apache.hadoop.mapreduce.JobID jobId) {
  Path jobFile = new Path(MRApps.getStagingAreaDir(conf, user),
      jobId.toString() + Path.SEPARATOR + MRJobConfig.JOB_CONF_FILE);
  return jobFile.toString();
}
 
Example 7
Source File: TestHsWebServicesJobConf.java    From hadoop with Apache License 2.0
@Override
protected void configureServlets() {

  Path confPath = new Path(testConfDir.toString(),
      MRJobConfig.JOB_CONF_FILE);
  Configuration config = new Configuration();

  FileSystem localFs;
  try {
    localFs = FileSystem.getLocal(config);
    confPath = localFs.makeQualified(confPath);

    OutputStream out = localFs.create(confPath);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    if (!localFs.exists(confPath)) {
      fail("error creating config file: " + confPath);
    }

  } catch (IOException e) {
    fail("error creating config file: " + e.getMessage());
  }

  appContext = new MockHistoryContext(0, 2, 1, confPath);

  webApp = mock(HsWebApp.class);
  when(webApp.name()).thenReturn("hsmockwebapp");

  bind(JAXBContextResolver.class);
  bind(HsWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(WebApp.class).toInstance(webApp);
  bind(AppContext.class).toInstance(appContext);
  bind(HistoryContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);

  serve("/*").with(GuiceContainer.class);
}
 
Example 8
Source File: YarnChild.java    From big-c with Apache License 2.0
private static void configureTask(JobConf job, Task task,
    Credentials credentials, Token<JobTokenIdentifier> jt) throws IOException {
  job.setCredentials(credentials);
  
  ApplicationAttemptId appAttemptId =
      ConverterUtils.toContainerId(
          System.getenv(Environment.CONTAINER_ID.name()))
          .getApplicationAttemptId();
  LOG.debug("APPLICATION_ATTEMPT_ID: " + appAttemptId);
  // Set it in conf so that it can be used by the OutputCommitter.
  job.setInt(MRJobConfig.APPLICATION_ATTEMPT_ID,
      appAttemptId.getAttemptId());

  // set tcp nodelay
  job.setBoolean("ipc.client.tcpnodelay", true);
  job.setClass(MRConfig.TASK_LOCAL_OUTPUT_CLASS,
      YarnOutputFiles.class, MapOutputFile.class);
  // set the jobToken and shuffle secrets into task
  task.setJobTokenSecret(
      JobTokenSecretManager.createSecretKey(jt.getPassword()));
  byte[] shuffleSecret = TokenCache.getShuffleSecretKey(credentials);
  if (shuffleSecret == null) {
    LOG.warn("Shuffle secret missing from task credentials."
        + " Using job token secret as shuffle secret.");
    shuffleSecret = jt.getPassword();
  }
  task.setShuffleSecret(
      JobTokenSecretManager.createSecretKey(shuffleSecret));

  // setup the child's MRConfig.LOCAL_DIR.
  configureLocalDirs(task, job);

  // setup the child's attempt directories
  // Do the task-type specific localization
  task.localizeConfiguration(job);

  // Set up the DistributedCache related configs
  MRApps.setupDistributedCacheLocal(job);

  // Overwrite the localized task jobconf which is linked to in the current
  // work-dir.
  Path localTaskFile = new Path(MRJobConfig.JOB_CONF_FILE);
  writeLocalJobFile(localTaskFile, job);
  task.setJobFile(localTaskFile.toString());
  task.setConf(job);
}
 
Example 9
Source File: MRApps.java    From big-c with Apache License 2.0
public static String getJobFile(Configuration conf, String user, 
    org.apache.hadoop.mapreduce.JobID jobId) {
  Path jobFile = new Path(MRApps.getStagingAreaDir(conf, user),
      jobId.toString() + Path.SEPARATOR + MRJobConfig.JOB_CONF_FILE);
  return jobFile.toString();
}
 
Example 10
Source File: TestHsWebServicesJobConf.java    From big-c with Apache License 2.0
@Override
protected void configureServlets() {

  Path confPath = new Path(testConfDir.toString(),
      MRJobConfig.JOB_CONF_FILE);
  Configuration config = new Configuration();

  FileSystem localFs;
  try {
    localFs = FileSystem.getLocal(config);
    confPath = localFs.makeQualified(confPath);

    OutputStream out = localFs.create(confPath);
    try {
      conf.writeXml(out);
    } finally {
      out.close();
    }
    if (!localFs.exists(confPath)) {
      fail("error creating config file: " + confPath);
    }

  } catch (IOException e) {
    fail("error creating config file: " + e.getMessage());
  }

  appContext = new MockHistoryContext(0, 2, 1, confPath);

  webApp = mock(HsWebApp.class);
  when(webApp.name()).thenReturn("hsmockwebapp");

  bind(JAXBContextResolver.class);
  bind(HsWebServices.class);
  bind(GenericExceptionHandler.class);
  bind(WebApp.class).toInstance(webApp);
  bind(AppContext.class).toInstance(appContext);
  bind(HistoryContext.class).toInstance(appContext);
  bind(Configuration.class).toInstance(conf);

  serve("/*").with(GuiceContainer.class);
}
 
Example 11
Source File: HadoopUtils.java    From ignite with Apache License 2.0
/**
 * Gets job file.
 *
 * @param conf Configuration.
 * @param usr User.
 * @param jobId Job ID.
 * @return Job file.
 */
public static Path jobFile(Configuration conf, String usr, JobID jobId) {
    return new Path(stagingAreaDir(conf, usr), jobId.toString() + Path.SEPARATOR + MRJobConfig.JOB_CONF_FILE);
}