Java Code Examples for org.apache.hadoop.util.ReflectionUtils.newInstance()

The following are Java code examples showing how to use the newInstance() method of the org.apache.hadoop.util.ReflectionUtils class. You can vote up the examples you like; your votes help surface more good examples.
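As a quick orientation before the examples: ReflectionUtils.newInstance(Class<T>, Configuration) instantiates the class through its no-argument constructor (which may be private) and, if the class implements Configurable, calls setConf(conf) on the new object. A minimal sketch of that behavior, using a hypothetical MyConfigurableThing class purely for illustration:

import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;

public class NewInstanceSketch {

  /** Hypothetical Configurable class used only for this illustration. */
  public static class MyConfigurableThing implements Configurable {
    private Configuration conf;
    @Override public void setConf(Configuration conf) { this.conf = conf; }
    @Override public Configuration getConf() { return conf; }
  }

  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("example.key", "example.value");

    // newInstance() invokes the no-arg constructor and, because the class
    // implements Configurable, also calls setConf(conf) on the new object.
    MyConfigurableThing thing =
        ReflectionUtils.newInstance(MyConfigurableThing.class, conf);

    System.out.println(thing.getConf().get("example.key"));
  }
}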
Example 1
Project: hadoop   File: TestBinaryPartitioner.java   (7 votes)
public void testCustomOffsets() {
  Configuration conf = new Configuration();
  BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 }); 
  BinaryComparable key2 = new BytesWritable(new byte[] { 6, 2, 3, 7, 8 });
  
  BinaryPartitioner.setOffsets(conf, 1, -3);
  BinaryPartitioner<?> partitioner = 
    ReflectionUtils.newInstance(BinaryPartitioner.class, conf);
  int partition1 = partitioner.getPartition(key1, null, 10);
  int partition2 = partitioner.getPartition(key2, null, 10);
  assertEquals(partition1, partition2);
  
  BinaryPartitioner.setOffsets(conf, 1, 2);
  partitioner = ReflectionUtils.newInstance(BinaryPartitioner.class, conf);
  partition1 = partitioner.getPartition(key1, null, 10);
  partition2 = partitioner.getPartition(key2, null, 10);
  assertEquals(partition1, partition2);
  
  BinaryPartitioner.setOffsets(conf, -4, -3);
  partitioner = ReflectionUtils.newInstance(BinaryPartitioner.class, conf);
  partition1 = partitioner.getPartition(key1, null, 10);
  partition2 = partitioner.getPartition(key2, null, 10);
  assertEquals(partition1, partition2);
}
 
Example 2
Project: aliyun-maxcompute-data-collectors   File: HBaseBulkImportMapper.java   (6 votes)
@Override
protected void setup(Context context)
    throws IOException, InterruptedException {
  this.conf = context.getConfiguration();
  this.lobLoader = new LargeObjectLoader(this.conf,
      new Path(this.conf.get("sqoop.hbase.lob.extern.dir",
          "/tmp/sqoop-hbase-" + context.getTaskAttemptID())));

  // Get the implementation of PutTransformer to use.
  // By default, we call toString() on every non-null field.
  Class<? extends PutTransformer> xformerClass =
      (Class<? extends PutTransformer>)
      this.conf.getClass(TRANSFORMER_CLASS_KEY, ToStringPutTransformer.class);
  this.putTransformer = (PutTransformer)
      ReflectionUtils.newInstance(xformerClass, this.conf);
  if (null == putTransformer) {
    throw new RuntimeException("Could not instantiate PutTransformer.");
  }
  this.putTransformer.setColumnFamily(conf.get(COL_FAMILY_KEY, null));
  this.putTransformer.setRowKeyColumn(conf.get(ROW_KEY_COLUMN_KEY, null));
}
 
Example 3
Project: hadoop-oss   File: SortedMapWritable.java   (6 votes)
@SuppressWarnings("unchecked")
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  
  // Read the number of entries in the map
  
  int entries = in.readInt();
  
  // Then read each key/value pair
  
  for (int i = 0; i < entries; i++) {
    K key =
      (K) ReflectionUtils.newInstance(getClass(
          in.readByte()), getConf());
    
    key.readFields(in);
    
    Writable value = (Writable) ReflectionUtils.newInstance(getClass(
        in.readByte()), getConf());
    
    value.readFields(in);
    instance.put(key, value);
  }
}
 
Example 4
Project: hadoop   File: TestDFSIO.java   (6 votes)
@Override // Mapper
public void configure(JobConf conf) {
  super.configure(conf);

  // grab compression
  String compression = getConf().get("test.io.compression.class", null);
  Class<? extends CompressionCodec> codec;

  // try to initialize codec
  try {
    codec = (compression == null) ? null : 
      Class.forName(compression).asSubclass(CompressionCodec.class);
  } catch(Exception e) {
    throw new RuntimeException("Compression codec not found: ", e);
  }

  if(codec != null) {
    compressionCodec = (CompressionCodec)
        ReflectionUtils.newInstance(codec, getConf());
  }
}
 
Example 5
Project: hadoop   File: TestWritable.java   (6 votes)
/** Utility method for testing writables. */
public static Writable testWritable(Writable before,
    Configuration conf) throws Exception {
  DataOutputBuffer dob = new DataOutputBuffer();
  before.write(dob);

  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), dob.getLength());
  
  Writable after = (Writable) ReflectionUtils.newInstance(
      before.getClass(), conf);
  after.readFields(dib);

  assertEquals(before, after);
  return after;
}
 
Example 6
Project: ditb   File: IndexFile.java   (6 votes)
@SuppressWarnings("unchecked")
public void readFields(DataInput in) throws IOException {
  // First clear the map. Otherwise we will just accumulate
  // entries every time this method is called.
  this.instance.clear();
  // Read the number of entries in the map
  int entries = in.readInt();
  // Then read each key/value pair
  for (int i = 0; i < entries; i++) {
    byte[] key = Bytes.readByteArray(in);
    byte id = in.readByte();
    Class clazz = getClass(id);
    V value = null;
    if (clazz.equals(byte[].class)) {
      byte[] bytes = Bytes.readByteArray(in);
      value = (V) bytes;
    } else {
      Writable w = (Writable) ReflectionUtils.newInstance(clazz, getConf());
      w.readFields(in);
      value = (V) w;
    }
    this.instance.put(key, value);
  }
}
 
Example 7
Project: hadoop   File: TestRecovery.java   (6 votes)
private void writeBadOutput(TaskAttempt attempt, Configuration conf)
  throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, 
      TypeConverter.fromYarn(attempt.getID()));
 
  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat
      .getRecordWriter(tContext);
  
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key2, val2);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val2);
    theRecordWriter.write(nullWritable, val1);
    theRecordWriter.write(key1, nullWritable);
    theRecordWriter.write(key2, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key1, val1);
  } finally {
    theRecordWriter.close(tContext);
  }
  
  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
 
Example 8
Project: hadoop   File: Chain.java   (5 votes)
@SuppressWarnings("unchecked")
private void writeToQueue(KEYOUT key, VALUEOUT value) throws IOException,
    InterruptedException {
  this.keyout = (KEYOUT) ReflectionUtils.newInstance(keyClass, conf);
  this.valueout = (VALUEOUT) ReflectionUtils.newInstance(valueClass, conf);
  ReflectionUtils.copy(conf, key, this.keyout);
  ReflectionUtils.copy(conf, value, this.valueout);

  // wait to write output to queue
  outputQueue.enqueue(new KeyValuePair<KEYOUT, VALUEOUT>(keyout, valueout));
}
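ReflectionUtils.copy(conf, src, dst), used above to duplicate the key and value, performs a deep copy by serializing src and deserializing it into dst through the configured serialization. A brief sketch with a Text value (chosen here only for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.ReflectionUtils;

public class CopySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Text src = new Text("payload");

    // Create the destination the same way Chain does, then overwrite it
    // with the serialized contents of src.
    Text dst = ReflectionUtils.newInstance(Text.class, conf);
    ReflectionUtils.copy(conf, src, dst);

    System.out.println(dst);  // prints "payload"
  }
}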
 
Example 9
Project: hadoop   File: MergeManagerImpl.java   (5 votes)
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator = 
    (RawComparator<K>)job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
                      Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
 
Example 10
Project: hadoop   File: WritableFactories.java   (5 votes)
/** Create a new instance of a class with a defined factory. */
public static Writable newInstance(Class<? extends Writable> c, Configuration conf) {
  WritableFactory factory = WritableFactories.getFactory(c);
  if (factory != null) {
    Writable result = factory.newInstance();
    if (result instanceof Configurable) {
      ((Configurable) result).setConf(conf);
    }
    return result;
  } else {
    return ReflectionUtils.newInstance(c, conf);
  }
}
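The factory branch above only fires if a factory was registered for the class via WritableFactories.setFactory(), typically from a static initializer. A sketch of that registration, where MyWritable is a hypothetical class used only for illustration:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableFactory;

public class MyWritable implements Writable {

  static {
    // Register a factory so WritableFactories.newInstance() can avoid reflection.
    WritableFactories.setFactory(MyWritable.class, new WritableFactory() {
      @Override
      public Writable newInstance() {
        return new MyWritable();
      }
    });
  }

  private int value;

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeInt(value);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    value = in.readInt();
  }
}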
 
Example 11
Project: hadoop-oss   File: Token.java   (5 votes)
/**
 * Get the token identifier object, or null if it could not be constructed
 * (because the class could not be loaded, for example).
 * @return the token identifier, or null
 * @throws IOException 
 */
@SuppressWarnings("unchecked")
public T decodeIdentifier() throws IOException {
  Class<? extends TokenIdentifier> cls = getClassForIdentifier(getKind());
  if (cls == null) {
    return null;
  }
  TokenIdentifier tokenIdentifier = ReflectionUtils.newInstance(cls, null);
  ByteArrayInputStream buf = new ByteArrayInputStream(identifier);
  DataInputStream in = new DataInputStream(buf);  
  tokenIdentifier.readFields(in);
  in.close();
  return (T) tokenIdentifier;
}
 
Example 12
Project: ditb   File: IntegrationTestingUtility.java   (5 votes)
public void createDistributedHBaseCluster() throws IOException {
  Configuration conf = getConfiguration();
  Class<? extends ClusterManager> clusterManagerClass = conf.getClass(HBASE_CLUSTER_MANAGER_CLASS,
    DEFAULT_HBASE_CLUSTER_MANAGER_CLASS, ClusterManager.class);
  ClusterManager clusterManager = ReflectionUtils.newInstance(
    clusterManagerClass, conf);
  setHBaseCluster(new DistributedHBaseCluster(conf, clusterManager));
  getHBaseAdmin();
}
 
Example 13
Project: hadoop   File: PipeMapRed.java   (5 votes)
OutputReader createOutputReader(Class<? extends OutputReader> outputReaderClass) 
  throws IOException {
  OutputReader outputReader =
    ReflectionUtils.newInstance(outputReaderClass, job_);
  outputReader.initialize(this);
  return outputReader;
}
 
Example 14
Project: hadoop   File: TestAvailableSpaceVolumeChoosingPolicy.java   (5 votes)
@Test(timeout=60000)
public void testThreeUnbalancedVolumes() throws Exception {
  @SuppressWarnings("unchecked")
  final AvailableSpaceVolumeChoosingPolicy<FsVolumeSpi> policy = 
      ReflectionUtils.newInstance(AvailableSpaceVolumeChoosingPolicy.class, null);
  
  List<FsVolumeSpi> volumes = new ArrayList<FsVolumeSpi>();
  
  // First volume with 1MB free space
  volumes.add(Mockito.mock(FsVolumeSpi.class));
  Mockito.when(volumes.get(0).getAvailable()).thenReturn(1024L * 1024L);
  
  // Second volume with 3MB free space, which is a difference of 2MB, more
  // than the threshold of 1MB.
  volumes.add(Mockito.mock(FsVolumeSpi.class));
  Mockito.when(volumes.get(1).getAvailable()).thenReturn(1024L * 1024L * 3);
  
  // Third volume, again with 3MB free space.
  volumes.add(Mockito.mock(FsVolumeSpi.class));
  Mockito.when(volumes.get(2).getAvailable()).thenReturn(1024L * 1024L * 3);
  
  // We should alternate assigning between the two volumes with a lot of free
  // space.
  initPolicy(policy, 1.0f);
  Assert.assertEquals(volumes.get(1), policy.chooseVolume(volumes, 100));
  Assert.assertEquals(volumes.get(2), policy.chooseVolume(volumes, 100));
  Assert.assertEquals(volumes.get(1), policy.chooseVolume(volumes, 100));
  Assert.assertEquals(volumes.get(2), policy.chooseVolume(volumes, 100));

  // All writes should be assigned to the volume with the least free space.
  initPolicy(policy, 0.0f);
  Assert.assertEquals(volumes.get(0), policy.chooseVolume(volumes, 100));
  Assert.assertEquals(volumes.get(0), policy.chooseVolume(volumes, 100));
  Assert.assertEquals(volumes.get(0), policy.chooseVolume(volumes, 100));
  Assert.assertEquals(volumes.get(0), policy.chooseVolume(volumes, 100));
}
 
Example 15
Project: hadoop   File: TestCodec.java   (5 votes)
@Test
public void testGzipCompatibility() throws IOException {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  LOG.info("seed: " + seed);

  DataOutputBuffer dflbuf = new DataOutputBuffer();
  GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
  byte[] b = new byte[r.nextInt(128 * 1024 + 1)];
  r.nextBytes(b);
  gzout.write(b);
  gzout.close();

  DataInputBuffer gzbuf = new DataInputBuffer();
  gzbuf.reset(dflbuf.getData(), dflbuf.getLength());

  Configuration conf = new Configuration();
  conf.setBoolean(CommonConfigurationKeys.IO_NATIVE_LIB_AVAILABLE_KEY, false);
  CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  Decompressor decom = codec.createDecompressor();
  assertNotNull(decom);
  assertEquals(BuiltInGzipDecompressor.class, decom.getClass());
  InputStream gzin = codec.createInputStream(gzbuf, decom);

  dflbuf.reset();
  IOUtils.copyBytes(gzin, dflbuf, 4096);
  final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
  assertArrayEquals(b, dflchk);
}
 
Example 16
Project: aliyun-maxcompute-data-collectors   File: InjectableConnManager.java   (5 votes)
/**
 * Allow the user to inject custom mapper, input, and output formats
 * into the importTable() process.
 */
@Override
@SuppressWarnings("unchecked")
public void importTable(ImportJobContext context)
    throws IOException, ImportException {

  SqoopOptions options = context.getOptions();
  Configuration conf = options.getConf();

  Class<? extends Mapper> mapperClass = (Class<? extends Mapper>)
      conf.getClass(MAPPER_KEY, Mapper.class);
  Class<? extends InputFormat> ifClass = (Class<? extends InputFormat>)
      conf.getClass(INPUT_FORMAT_KEY, TextInputFormat.class);
  Class<? extends OutputFormat> ofClass = (Class<? extends OutputFormat>)
      conf.getClass(OUTPUT_FORMAT_KEY, TextOutputFormat.class);

  Class<? extends ImportJobBase> jobClass = (Class<? extends ImportJobBase>)
      conf.getClass(IMPORT_JOB_KEY, ImportJobBase.class);

  String tableName = context.getTableName();

  // Instantiate the user's chosen ImportJobBase instance.
  ImportJobBase importJob = ReflectionUtils.newInstance(jobClass, conf);

  // And configure the dependencies to inject
  importJob.setOptions(options);
  importJob.setMapperClass(mapperClass);
  importJob.setInputFormatClass(ifClass);
  importJob.setOutputFormatClass(ofClass);

  importJob.runImport(tableName, context.getJarFile(),
      getSplitColumn(options, tableName), conf);
}
 
Example 17
Project: hadoop-oss   File: MapFile.java   (4 votes)
/**
 * Merge all input files to output map file.<br>
 * 1. Read first key/value from all input files to keys/values array. <br>
 * 2. Select the least key and corresponding value. <br>
 * 3. Write the selected key and value to output file. <br>
 * 4. Replace the already written key/value in keys/values arrays with the
 * next key/value from the selected input <br>
 * 5. Repeat steps 2-4 until all keys are read. <br>
 */
private void mergePass() throws IOException {
  // re-usable array
  WritableComparable[] keys = new WritableComparable[inReaders.length];
  Writable[] values = new Writable[inReaders.length];
  // Read first key/value from all inputs
  for (int i = 0; i < inReaders.length; i++) {
    keys[i] = ReflectionUtils.newInstance(keyClass, null);
    values[i] = ReflectionUtils.newInstance(valueClass, null);
    if (!inReaders[i].next(keys[i], values[i])) {
      // Handle empty files
      keys[i] = null;
      values[i] = null;
    }
  }

  do {
    int currentEntry = -1;
    WritableComparable currentKey = null;
    Writable currentValue = null;
    for (int i = 0; i < keys.length; i++) {
      if (keys[i] == null) {
        // Skip readers that have reached EOF
        continue;
      }
      if (currentKey == null || comparator.compare(currentKey, keys[i]) > 0) {
        currentEntry = i;
        currentKey = keys[i];
        currentValue = values[i];
      }
    }
    if (currentKey == null) {
      // Merge Complete
      break;
    }
    // Write the selected key/value to merge stream
    outWriter.append(currentKey, currentValue);
    // Replace the already written key/value in keys/values arrays with the
    // next key/value from the selected input
    if (!inReaders[currentEntry].next(keys[currentEntry],
        values[currentEntry])) {
      // EOF for this file
      keys[currentEntry] = null;
      values[currentEntry] = null;
    }
  } while (true);
}
 
Example 18
Project: hadoop   File: MapReduceTestUtil.java   (4 votes)
public K getCurrentKey() {
  return ReflectionUtils.newInstance(keyclass, null);
}
 
Example 19
Project: aliyun-maxcompute-data-collectors   File: TestBoundaryQuery.java   (4 votes)
public void runQueryTest(String query, boolean tableImport,
    int numExpectedResults, int expectedSum, String targetDir,
    String... extraArgs) throws IOException {

  ClassLoader prevClassLoader = null;
  SequenceFile.Reader reader = null;

  String [] argv = getArgv(true, tableImport, query, targetDir, extraArgs);
  runImport(argv);
  try {
    SqoopOptions opts = new ImportTool().parseArguments(
        getArgv(false, tableImport, query, targetDir, extraArgs),
        null, null, true);

    CompilationManager compileMgr = new CompilationManager(opts);
    String jarFileName = compileMgr.getJarFilename();

    prevClassLoader = ClassLoaderStack.addJarFile(jarFileName,
        getTableName());

    reader = SeqFileReader.getSeqFileReader(getDataFilePath().toString());

    // here we can actually instantiate (k, v) pairs.
    Configuration conf = new Configuration();
    Object key = ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Object val = ReflectionUtils.newInstance(reader.getValueClass(), conf);

    if (reader.next(key) == null) {
      fail("Empty SequenceFile during import");
    }

    // make sure that the value we think should be at the top, is.
    reader.getCurrentValue(val);

    // We know that these values are two ints separated by a ',' character.
    // Since this is all dynamic, though, we don't want to actually link
    // against the class and use its methods. So we just parse this back
    // into int fields manually.  Sum them up and ensure that we get the
    // expected total for the first column, to verify that we got all the
    // results from the db into the file.
    int curSum = getFirstInt(val.toString());
    int totalResults = 1;

    // now sum up everything else in the file.
    while (reader.next(key) != null) {
      reader.getCurrentValue(val);
      curSum += getFirstInt(val.toString());
      totalResults++;
    }

    assertEquals("Total sum of first db column mismatch", expectedSum,
        curSum);
    assertEquals("Incorrect number of results for query", numExpectedResults,
        totalResults);
  } catch (InvalidOptionsException ioe) {
    fail(ioe.toString());
  } catch (ParseException pe) {
    fail(pe.toString());
  } finally {
    IOUtils.closeStream(reader);

    if (null != prevClassLoader) {
      ClassLoaderStack.setCurrentClassLoader(prevClassLoader);
    }
  }
}
 
Example 20
Project: hadoop-oss   File: NetworkTopology.java   (2 votes)
/**
 * Get an instance of NetworkTopology based on the value of the configuration
 * parameter net.topology.impl.
 * 
 * @param conf the configuration to be used
 * @return an instance of NetworkTopology
 */
public static NetworkTopology getInstance(Configuration conf){
  return ReflectionUtils.newInstance(
      conf.getClass(CommonConfigurationKeysPublic.NET_TOPOLOGY_IMPL_KEY,
      NetworkTopology.class, NetworkTopology.class), conf);
}