Java Code Examples for org.apache.hadoop.mapred.JobConf#getClass()

The following examples show how to use org.apache.hadoop.mapred.JobConf#getClass(). Each example is drawn from an open-source project; the source file and license are noted above the code.
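At its core, JobConf inherits getClass(String name, Class<? extends U> defaultValue, Class<U> xface) from Configuration: it resolves the class named under the key, falls back to the supplied default when the key is unset, and rejects a resolved class that does not implement the given interface. A minimal, self-contained sketch of the round trip (the key demo.mapper.class is made up for illustration):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.InverseMapper;
import org.apache.hadoop.util.ReflectionUtils;

public class GetClassDemo {
  public static void main(String[] args) {
    JobConf job = new JobConf();

    // setClass validates that InverseMapper implements Mapper before
    // storing its name under the (made-up) key.
    job.setClass("demo.mapper.class", InverseMapper.class, Mapper.class);

    // getClass resolves the stored name, falling back to IdentityMapper
    // if the key were unset.
    Class<? extends Mapper> mapperClass =
        job.getClass("demo.mapper.class", IdentityMapper.class, Mapper.class);

    // Hadoop's reflective factory injects the configuration into
    // JobConfigurable instances.
    Mapper mapper = ReflectionUtils.newInstance(mapperClass, job);
    System.out.println("Resolved " + mapper.getClass().getName());
  }
}

The two-argument overload getClass(String name, Class<?> defaultValue), seen in Examples 6, 17, and 20 below, skips the interface check.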
Example 1
Source File: PipeReducer.java    From RDFS with Apache License 2.0
public void configure(JobConf job) {
    super.configure(job);

    // Optional post-hook: a Mapper class named in the configuration,
    // instantiated through Hadoop's reflective factory.
    Class<?> c = job.getClass("stream.reduce.posthook", null, Mapper.class);
    if (c != null) {
        postMapper = (Mapper) ReflectionUtils.newInstance(c, job);
        LOG.info("PostHook=" + c.getName());
    }

    // Optional pre-hook: a Reducer instantiated alongside an in-memory
    // buffering output collector.
    c = job.getClass("stream.reduce.prehook", null, Reducer.class);
    if (c != null) {
        preReducer = (Reducer) ReflectionUtils.newInstance(c, job);
        oc = new InmemBufferingOutputCollector();
        LOG.info("PreHook=" + c.getName());
    }

    this.ignoreKey = job.getBoolean("stream.reduce.ignoreKey", false);
}
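On the submitting side, these hook keys would be populated with the matching JobConf#setClass call, which validates the class against the same interface. A sketch (MyPreHookReducer is a hypothetical Reducer implementation):

JobConf job = new JobConf();
// Hypothetical Reducer registered under the key that configure() reads.
job.setClass("stream.reduce.prehook", MyPreHookReducer.class, Reducer.class);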
 
Example 2
Source File: Parser.java    From hadoop with Apache License 2.0
/**
 * Given an expression and an optional comparator, build a tree of
 * InputFormats using the comparator to sort keys.
 */
static Node parse(String expr, JobConf job) throws IOException {
  if (null == expr) {
    throw new IOException("Expression is null");
  }
  Class<? extends WritableComparator> cmpcl =
    job.getClass("mapred.join.keycomparator", null, WritableComparator.class);
  Lexer lex = new Lexer(expr);
  Stack<Token> st = new Stack<Token>();
  Token tok;
  while ((tok = lex.next()) != null) {
    if (TType.RPAREN.equals(tok.getType())) {
      st.push(reduce(st, job));
    } else {
      st.push(tok);
    }
  }
  if (st.size() == 1 && TType.CIF.equals(st.peek().getType())) {
    Node ret = st.pop().getNode();
    if (cmpcl != null) {
      ret.setKeyComparator(cmpcl);
    }
    return ret;
  }
  throw new IOException("Missing ')'");
}
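The comparator is optional because the default passed to getClass is null. A driver that wants custom key ordering for the join would bind a WritableComparator subclass to the same key before submission; a sketch (MyKeyComparator is hypothetical):

JobConf job = new JobConf();
// Hypothetical comparator; setClass rejects classes that do not
// extend WritableComparator.
job.setClass("mapred.join.keycomparator", MyKeyComparator.class, WritableComparator.class);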
 
Example 3
Source File: Parser.java    From RDFS with Apache License 2.0
/**
 * Given an expression and an optional comparator, build a tree of
 * InputFormats using the comparator to sort keys.
 */
static Node parse(String expr, JobConf job) throws IOException {
  if (null == expr) {
    throw new IOException("Expression is null");
  }
  Class<? extends WritableComparator> cmpcl =
    job.getClass("mapred.join.keycomparator", null, WritableComparator.class);
  Lexer lex = new Lexer(expr);
  Stack<Token> st = new Stack<Token>();
  Token tok;
  while ((tok = lex.next()) != null) {
    if (TType.RPAREN.equals(tok.getType())) {
      st.push(reduce(st, job));
    } else {
      st.push(tok);
    }
  }
  if (st.size() == 1 && TType.CIF.equals(st.peek().getType())) {
    Node ret = st.pop().getNode();
    if (cmpcl != null) {
      ret.setKeyComparator(cmpcl);
    }
    return ret;
  }
  throw new IOException("Missing ')'");
}
 
Example 4
Source File: Parser.java    From big-c with Apache License 2.0
/**
 * Given an expression and an optional comparator, build a tree of
 * InputFormats using the comparator to sort keys.
 */
static Node parse(String expr, JobConf job) throws IOException {
  if (null == expr) {
    throw new IOException("Expression is null");
  }
  Class<? extends WritableComparator> cmpcl =
    job.getClass("mapred.join.keycomparator", null, WritableComparator.class);
  Lexer lex = new Lexer(expr);
  Stack<Token> st = new Stack<Token>();
  Token tok;
  while ((tok = lex.next()) != null) {
    if (TType.RPAREN.equals(tok.getType())) {
      st.push(reduce(st, job));
    } else {
      st.push(tok);
    }
  }
  if (st.size() == 1 && TType.CIF.equals(st.peek().getType())) {
    Node ret = st.pop().getNode();
    if (cmpcl != null) {
      ret.setKeyComparator(cmpcl);
    }
    return ret;
  }
  throw new IOException("Missing ')'");
}
 
Example 5
Source File: TestWrappedRecordReaderClassloader.java    From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
public RR_ClassLoaderChecker(JobConf job) {
  assertTrue("The class loader has not been inherited from "
      + CompositeRecordReader.class.getSimpleName(),
      job.getClassLoader() instanceof Fake_ClassLoader);

  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      NullWritable.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
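A driver feeding this checker would set both keys to concrete WritableComparable types; a sketch using stock Writable classes:

JobConf job = new JobConf();
job.setClass("test.fakeif.keyclass", Text.class, WritableComparable.class);
job.setClass("test.fakeif.valclass", LongWritable.class, WritableComparable.class);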
 
Example 6
Source File: PipeMapper.java    From RDFS with Apache License 2.0
public void configure(JobConf job) {
  super.configure(job);

  // Keys are ignored for TextInputFormat input, or when explicitly
  // requested via stream.map.ignoreKey.
  String inputFormatClassName =
      job.getClass("mapred.input.format.class", TextInputFormat.class).getCanonicalName();
  this.ignoreKey = inputFormatClassName.equals(TextInputFormat.class.getCanonicalName())
      || job.getBoolean("stream.map.ignoreKey", false);

  this.skipNewline = job.getBoolean("stream.map.skipNewline", false);

  // Optional post-hook: a Mapper class named in the configuration.
  Class<?> c = job.getClass("stream.map.posthook", null, Mapper.class);
  if (c != null) {
    postMapper = (Mapper) ReflectionUtils.newInstance(c, job);
    LOG.info("PostHook=" + c.getName());
  }
}
 
Example 7
Source File: TestDatamerge.java    From RDFS with Apache License 2.0
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      IncomparableKey.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
 
Example 8
Source File: TestShufflePlugin.java    From big-c with Apache License 2.0
/**
 * A testing method instructing core hadoop to load an external
 * ShuffleConsumerPlugin as if it came from a 3rd party.
 */
@Test
public void testPluginAbility() {
  try {
    // create JobConf with mapreduce.job.shuffle.consumer.plugin=TestShuffleConsumerPlugin
    JobConf jobConf = new JobConf();
    jobConf.setClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN,
                     TestShufflePlugin.TestShuffleConsumerPlugin.class,
                     ShuffleConsumerPlugin.class);

    ShuffleConsumerPlugin shuffleConsumerPlugin = null;
    Class<? extends ShuffleConsumerPlugin> clazz =
      jobConf.getClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN, Shuffle.class, ShuffleConsumerPlugin.class);
    assertNotNull("Unable to get " + MRConfig.SHUFFLE_CONSUMER_PLUGIN, clazz);

    // load 3rd party plugin through core's factory method
    shuffleConsumerPlugin = ReflectionUtils.newInstance(clazz, jobConf);
    assertNotNull("Unable to load " + MRConfig.SHUFFLE_CONSUMER_PLUGIN, shuffleConsumerPlugin);
  } catch (Exception e) {
    assertTrue("Threw exception: " + e, false);
  }
}
 
Example 9
Source File: TestDatamerge.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      IncomparableKey.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
 
Example 10
Source File: TestWrappedRecordReaderClassloader.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
public RR_ClassLoaderChecker(JobConf job) {
  assertTrue("The class loader has not been inherited from "
      + CompositeRecordReader.class.getSimpleName(),
      job.getClassLoader() instanceof Fake_ClassLoader);

  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      NullWritable.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
 
Example 11
Source File: TestWrappedRecordReaderClassloader.java    From big-c with Apache License 2.0
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      NullWritable.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
 
Example 12
Source File: TestShufflePlugin.java    From hadoop with Apache License 2.0
/**
 * A testing method instructing core hadoop to load an external
 * ShuffleConsumerPlugin as if it came from a 3rd party.
 */
@Test
public void testPluginAbility() {
  try {
    // create JobConf with mapreduce.job.shuffle.consumer.plugin=TestShuffleConsumerPlugin
    JobConf jobConf = new JobConf();
    jobConf.setClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN,
                     TestShufflePlugin.TestShuffleConsumerPlugin.class,
                     ShuffleConsumerPlugin.class);

    ShuffleConsumerPlugin shuffleConsumerPlugin = null;
    Class<? extends ShuffleConsumerPlugin> clazz =
      jobConf.getClass(MRConfig.SHUFFLE_CONSUMER_PLUGIN, Shuffle.class, ShuffleConsumerPlugin.class);
    assertNotNull("Unable to get " + MRConfig.SHUFFLE_CONSUMER_PLUGIN, clazz);

    // load 3rd party plugin through core's factory method
    shuffleConsumerPlugin = ReflectionUtils.newInstance(clazz, jobConf);
    assertNotNull("Unable to load " + MRConfig.SHUFFLE_CONSUMER_PLUGIN, shuffleConsumerPlugin);
  } catch (Exception e) {
    assertTrue("Threw exception: " + e, false);
  }
}
 
Example 13
Source File: TestDatamerge.java    From hadoop-gpu with Apache License 2.0
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      IncomparableKey.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
 
Example 14
Source File: TestWrappedRecordReaderClassloader.java    From hadoop with Apache License 2.0
@SuppressWarnings("unchecked")
public void configure(JobConf job) {
  keyclass = (Class<? extends K>) job.getClass("test.fakeif.keyclass",
      NullWritable.class, WritableComparable.class);
  valclass = (Class<? extends V>) job.getClass("test.fakeif.valclass",
      NullWritable.class, WritableComparable.class);
}
 
Example 15
Source File: Submitter.java    From big-c with Apache License 2.0
/**
 * Get the user's original partitioner.
 * @param conf the configuration to look in
 * @return the class that the user submitted
 */
static Class<? extends Partitioner> getJavaPartitioner(JobConf conf) {
  return conf.getClass(Submitter.PARTITIONER, 
                       HashPartitioner.class,
                       Partitioner.class);
}
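The matching write side binds the user's partitioner class to the same key; a sketch (MyPartitioner is a hypothetical org.apache.hadoop.mapred.Partitioner implementation):

JobConf conf = new JobConf();
// Hypothetical Partitioner registered under Submitter.PARTITIONER.
conf.setClass(Submitter.PARTITIONER, MyPartitioner.class, Partitioner.class);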
 
Example 16
Source File: Submitter.java    From RDFS with Apache License 2.0
/**
 * Get the user's original partitioner.
 * @param conf the configuration to look in
 * @return the class that the user submitted
 */
static Class<? extends Partitioner> getJavaPartitioner(JobConf conf) {
  return conf.getClass("hadoop.pipes.partitioner", 
                       HashPartitioner.class,
                       Partitioner.class);
}
 
Example 17
Source File: GenericMRLoadJobCreator.java    From RDFS with Apache License 2.0
public static JobConf createJob(String[] argv, boolean mapoutputCompressed,
    boolean outputCompressed) throws Exception {

  JobConf job = new JobConf();
  job.setJarByClass(GenericMRLoadGenerator.class);
  job.setMapperClass(SampleMapper.class);
  job.setReducerClass(SampleReducer.class);
  if (!parseArgs(argv, job)) {
    return null;
  }

  if (null == FileOutputFormat.getOutputPath(job)) {
    // No output dir? No writes
    job.setOutputFormat(NullOutputFormat.class);
  }

  if (0 == FileInputFormat.getInputPaths(job).length) {
    // No input dir? Generate random data
    System.err.println("No input path; ignoring InputFormat");
    confRandom(job);
  } else if (null != job.getClass("mapred.indirect.input.format", null)) {
    // specified IndirectInputFormat? Build src list
    JobClient jClient = new JobClient(job);
    Path sysdir = jClient.getSystemDir();
    Random r = new Random();
    Path indirInputFile = new Path(sysdir, Integer.toString(r
        .nextInt(Integer.MAX_VALUE), 36)
        + "_files");
    job.set("mapred.indirect.input.file", indirInputFile.toString());
    SequenceFile.Writer writer = SequenceFile.createWriter(sysdir
        .getFileSystem(job), job, indirInputFile, LongWritable.class,
        Text.class, SequenceFile.CompressionType.NONE);
    try {
      for (Path p : FileInputFormat.getInputPaths(job)) {
        FileSystem fs = p.getFileSystem(job);
        Stack<Path> pathstack = new Stack<Path>();
        pathstack.push(p);
        while (!pathstack.empty()) {
          for (FileStatus stat : fs.listStatus(pathstack.pop())) {
            if (stat.isDir()) {
              if (!stat.getPath().getName().startsWith("_")) {
                pathstack.push(stat.getPath());
              }
            } else {
              writer.sync();
              writer.append(new LongWritable(stat.getLen()), new Text(stat
                  .getPath().toUri().toString()));
            }
          }
        }
      }
    } finally {
      writer.close();
    }
  }

  job.setCompressMapOutput(mapoutputCompressed);
  job.setBoolean("mapred.output.compress", outputCompressed);
  return job;
}
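The mapred.indirect.input.format branch only fires when a caller has bound an InputFormat to that key; opting in might look like this sketch (MyIndirectInputFormat is hypothetical):

JobConf job = new JobConf();
// Hypothetical InputFormat that consumes the file list written above.
job.setClass("mapred.indirect.input.format", MyIndirectInputFormat.class, InputFormat.class);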
 
Example 18
Source File: Submitter.java    From hadoop with Apache License 2.0
/**
 * Get the user's original partitioner.
 * @param conf the configuration to look in
 * @return the class that the user submitted
 */
static Class<? extends Partitioner> getJavaPartitioner(JobConf conf) {
  return conf.getClass(Submitter.PARTITIONER, 
                       HashPartitioner.class,
                       Partitioner.class);
}
 
Example 19
Source File: Submitter.java    From hadoop-gpu with Apache License 2.0
/**
 * Get the user's original partitioner.
 * @param conf the configuration to look in
 * @return the class that the user submitted
 */
static Class<? extends Partitioner> getJavaPartitioner(JobConf conf) {
  return conf.getClass("hadoop.pipes.partitioner", 
                       HashPartitioner.class,
                       Partitioner.class);
}
 
Example 20
Source File: GenericMRLoadJobCreator.java    From hadoop-gpu with Apache License 2.0
public static JobConf createJob(String[] argv, boolean mapoutputCompressed,
    boolean outputCompressed) throws Exception {

  JobConf job = new JobConf();
  job.setJarByClass(GenericMRLoadGenerator.class);
  job.setMapperClass(SampleMapper.class);
  job.setReducerClass(SampleReducer.class);
  if (!parseArgs(argv, job)) {
    return null;
  }

  if (null == FileOutputFormat.getOutputPath(job)) {
    // No output dir? No writes
    job.setOutputFormat(NullOutputFormat.class);
  }

  if (0 == FileInputFormat.getInputPaths(job).length) {
    // No input dir? Generate random data
    System.err.println("No input path; ignoring InputFormat");
    confRandom(job);
  } else if (null != job.getClass("mapred.indirect.input.format", null)) {
    // specified IndirectInputFormat? Build src list
    JobClient jClient = new JobClient(job);
    Path sysdir = jClient.getSystemDir();
    Random r = new Random();
    Path indirInputFile = new Path(sysdir, Integer.toString(r
        .nextInt(Integer.MAX_VALUE), 36)
        + "_files");
    job.set("mapred.indirect.input.file", indirInputFile.toString());
    SequenceFile.Writer writer = SequenceFile.createWriter(sysdir
        .getFileSystem(job), job, indirInputFile, LongWritable.class,
        Text.class, SequenceFile.CompressionType.NONE);
    try {
      for (Path p : FileInputFormat.getInputPaths(job)) {
        FileSystem fs = p.getFileSystem(job);
        Stack<Path> pathstack = new Stack<Path>();
        pathstack.push(p);
        while (!pathstack.empty()) {
          for (FileStatus stat : fs.listStatus(pathstack.pop())) {
            if (stat.isDir()) {
              if (!stat.getPath().getName().startsWith("_")) {
                pathstack.push(stat.getPath());
              }
            } else {
              writer.sync();
              writer.append(new LongWritable(stat.getLen()), new Text(stat
                  .getPath().toUri().toString()));
            }
          }
        }
      }
    } finally {
      writer.close();
    }
  }

  job.setCompressMapOutput(mapoutputCompressed);
  job.setBoolean("mapred.output.compress", outputCompressed);
  return job;
}