Java Code Examples for org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator

The following examples show how to use org.apache.hadoop.io.SequenceFile.Sorter.RawKeyValueIterator. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: hadoop   Source File: MergeSorter.java    License: Apache License 2.0 5 votes vote down vote up
/** The sort method derived from BasicTypeSorterBase and overridden here*/
/** The sort method derived from BasicTypeSorterBase and overridden here. */
public RawKeyValueIterator sort() {
  final MergeSort merger = new MergeSort(this);
  final int recordCount = super.count;
  // Nothing buffered: return null, matching the existing contract.
  if (recordCount == 0) {
    return null;
  }
  // mergeSort needs a scratch copy of the pointer table to merge into.
  final int[] livePointers = super.pointers;
  final int[] sortedPointers = new int[recordCount];
  System.arraycopy(livePointers, 0, sortedPointers, 0, recordCount);
  merger.mergeSort(livePointers, sortedPointers, 0, recordCount);
  // The sorted pointer table indexes records inside the shared key/value buffer.
  return new MRSortResultIterator(super.keyValBuffer, sortedPointers,
                                  super.startOffsets, super.keyLengths,
                                  super.valueLengths);
}
 
Example 2
Source Project: big-c   Source File: MergeSorter.java    License: Apache License 2.0 5 votes vote down vote up
/** The sort method derived from BasicTypeSorterBase and overridden here*/
/** The sort method derived from BasicTypeSorterBase and overridden here. */
public RawKeyValueIterator sort() {
  final MergeSort merger = new MergeSort(this);
  final int recordCount = super.count;
  // Nothing buffered: return null, matching the existing contract.
  if (recordCount == 0) {
    return null;
  }
  // mergeSort needs a scratch copy of the pointer table to merge into.
  final int[] livePointers = super.pointers;
  final int[] sortedPointers = new int[recordCount];
  System.arraycopy(livePointers, 0, sortedPointers, 0, recordCount);
  merger.mergeSort(livePointers, sortedPointers, 0, recordCount);
  // The sorted pointer table indexes records inside the shared key/value buffer.
  return new MRSortResultIterator(super.keyValBuffer, sortedPointers,
                                  super.startOffsets, super.keyLengths,
                                  super.valueLengths);
}
 
Example 3
Source Project: RDFS   Source File: MergeSorter.java    License: Apache License 2.0 5 votes vote down vote up
/** The sort method derived from BasicTypeSorterBase and overridden here*/
/** The sort method derived from BasicTypeSorterBase and overridden here. */
public RawKeyValueIterator sort() {
  final MergeSort merger = new MergeSort(this);
  final int recordCount = super.count;
  // Nothing buffered: return null, matching the existing contract.
  if (recordCount == 0) {
    return null;
  }
  // mergeSort needs a scratch copy of the pointer table to merge into.
  final int[] livePointers = super.pointers;
  final int[] sortedPointers = new int[recordCount];
  System.arraycopy(livePointers, 0, sortedPointers, 0, recordCount);
  merger.mergeSort(livePointers, sortedPointers, 0, recordCount);
  // The sorted pointer table indexes records inside the shared key/value buffer.
  return new MRSortResultIterator(super.keyValBuffer, sortedPointers,
                                  super.startOffsets, super.keyLengths,
                                  super.valueLengths);
}
 
Example 4
Source Project: hadoop-gpu   Source File: MergeSorter.java    License: Apache License 2.0 5 votes vote down vote up
/** The sort method derived from BasicTypeSorterBase and overridden here*/
/** The sort method derived from BasicTypeSorterBase and overridden here. */
public RawKeyValueIterator sort() {
  final MergeSort merger = new MergeSort(this);
  final int recordCount = super.count;
  // Nothing buffered: return null, matching the existing contract.
  if (recordCount == 0) {
    return null;
  }
  // mergeSort needs a scratch copy of the pointer table to merge into.
  final int[] livePointers = super.pointers;
  final int[] sortedPointers = new int[recordCount];
  System.arraycopy(livePointers, 0, sortedPointers, 0, recordCount);
  merger.mergeSort(livePointers, sortedPointers, 0, recordCount);
  // The sorted pointer table indexes records inside the shared key/value buffer.
  return new MRSortResultIterator(super.keyValBuffer, sortedPointers,
                                  super.startOffsets, super.keyLengths,
                                  super.valueLengths);
}
 
Example 5
Source Project: hadoop   Source File: TestSequenceFileMergeProgress.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile
 * using the given compression type, then merges that single file with
 * {@link SequenceFile.Sorter} and verifies both the record count and that
 * the merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode under test
 * @throws IOException on any local filesystem failure
 */
public void runTest(CompressionType compressionType) throws IOException {
  final JobConf conf = new JobConf();
  final FileSystem localFs = FileSystem.getLocal(conf);
  final Path baseDir =
      new Path(System.getProperty("test.build.data", ".") + "/mapred");
  final Path seqFile = new Path(baseDir, "test.seq");
  final Path mergeTmp = new Path(baseDir, "tmp");

  // Start from a clean directory so stale files cannot skew the count.
  localFs.delete(baseDir, true);
  FileInputFormat.setInputPaths(conf, baseDir);
  localFs.mkdirs(mergeTmp);

  final LongWritable key = new LongWritable();
  final Text value = new Text();

  final SequenceFile.Writer writer =
      SequenceFile.createWriter(localFs, conf, seqFile, LongWritable.class,
          Text.class, compressionType, new DefaultCodec());
  try {
    // Every record carries the same key and value; only the count matters.
    for (int written = 0; written < RECORDS; written++) {
      key.set(1234);
      value.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(key, value);
    }
  } finally {
    writer.close();
  }

  final long fileLength = localFs.getFileStatus(seqFile).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  final SequenceFile.Sorter sorter = new SequenceFile.Sorter(localFs,
      conf.getOutputKeyComparator(), conf.getMapOutputKeyClass(),
      conf.getMapOutputValueClass(), conf);
  final RawKeyValueIterator merged =
      sorter.merge(new Path[] { seqFile }, mergeTmp, false);
  int seen = 0;
  while (merged.next()) {
    seen++;
  }
  assertEquals(RECORDS, seen);
  assertEquals(1.0f, merged.getProgress().get());
}
 
Example 6
Source Project: big-c   Source File: TestSequenceFileMergeProgress.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile
 * using the given compression type, then merges that single file with
 * {@link SequenceFile.Sorter} and verifies both the record count and that
 * the merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode under test
 * @throws IOException on any local filesystem failure
 */
public void runTest(CompressionType compressionType) throws IOException {
  final JobConf conf = new JobConf();
  final FileSystem localFs = FileSystem.getLocal(conf);
  final Path baseDir =
      new Path(System.getProperty("test.build.data", ".") + "/mapred");
  final Path seqFile = new Path(baseDir, "test.seq");
  final Path mergeTmp = new Path(baseDir, "tmp");

  // Start from a clean directory so stale files cannot skew the count.
  localFs.delete(baseDir, true);
  FileInputFormat.setInputPaths(conf, baseDir);
  localFs.mkdirs(mergeTmp);

  final LongWritable key = new LongWritable();
  final Text value = new Text();

  final SequenceFile.Writer writer =
      SequenceFile.createWriter(localFs, conf, seqFile, LongWritable.class,
          Text.class, compressionType, new DefaultCodec());
  try {
    // Every record carries the same key and value; only the count matters.
    for (int written = 0; written < RECORDS; written++) {
      key.set(1234);
      value.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(key, value);
    }
  } finally {
    writer.close();
  }

  final long fileLength = localFs.getFileStatus(seqFile).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  final SequenceFile.Sorter sorter = new SequenceFile.Sorter(localFs,
      conf.getOutputKeyComparator(), conf.getMapOutputKeyClass(),
      conf.getMapOutputValueClass(), conf);
  final RawKeyValueIterator merged =
      sorter.merge(new Path[] { seqFile }, mergeTmp, false);
  int seen = 0;
  while (merged.next()) {
    seen++;
  }
  assertEquals(RECORDS, seen);
  assertEquals(1.0f, merged.getProgress().get());
}
 
Example 7
Source Project: RDFS   Source File: TestSequenceFileMergeProgress.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile
 * using the given compression type, then merges that single file with
 * {@link SequenceFile.Sorter} and verifies both the record count and that
 * the merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode under test
 * @throws IOException on any local filesystem failure
 */
public void runTest(CompressionType compressionType) throws IOException {
  final JobConf conf = new JobConf();
  final FileSystem localFs = FileSystem.getLocal(conf);
  final Path baseDir =
      new Path(System.getProperty("test.build.data", ".") + "/mapred");
  final Path seqFile = new Path(baseDir, "test.seq");
  final Path mergeTmp = new Path(baseDir, "tmp");

  // Start from a clean directory so stale files cannot skew the count.
  localFs.delete(baseDir, true);
  FileInputFormat.setInputPaths(conf, baseDir);
  localFs.mkdirs(mergeTmp);

  final LongWritable key = new LongWritable();
  final Text value = new Text();

  final SequenceFile.Writer writer =
      SequenceFile.createWriter(localFs, conf, seqFile, LongWritable.class,
          Text.class, compressionType, new DefaultCodec());
  try {
    // Every record carries the same key and value; only the count matters.
    for (int written = 0; written < RECORDS; written++) {
      key.set(1234);
      value.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(key, value);
    }
  } finally {
    writer.close();
  }

  final long fileLength = localFs.getFileStatus(seqFile).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  final SequenceFile.Sorter sorter = new SequenceFile.Sorter(localFs,
      conf.getOutputKeyComparator(), conf.getMapOutputKeyClass(),
      conf.getMapOutputValueClass(), conf);
  final RawKeyValueIterator merged =
      sorter.merge(new Path[] { seqFile }, mergeTmp, false);
  int seen = 0;
  while (merged.next()) {
    seen++;
  }
  assertEquals(RECORDS, seen);
  assertEquals(1.0f, merged.getProgress().get());
}
 
Example 8
Source Project: hadoop-gpu   Source File: TestSequenceFileMergeProgress.java    License: Apache License 2.0 4 votes vote down vote up
/**
 * Writes {@code RECORDS} identical key/value pairs to a local SequenceFile
 * using the given compression type, then merges that single file with
 * {@link SequenceFile.Sorter} and verifies both the record count and that
 * the merge progress reaches 1.0.
 *
 * @param compressionType the SequenceFile compression mode under test
 * @throws IOException on any local filesystem failure
 */
public void runTest(CompressionType compressionType) throws IOException {
  final JobConf conf = new JobConf();
  final FileSystem localFs = FileSystem.getLocal(conf);
  final Path baseDir =
      new Path(System.getProperty("test.build.data", ".") + "/mapred");
  final Path seqFile = new Path(baseDir, "test.seq");
  final Path mergeTmp = new Path(baseDir, "tmp");

  // Start from a clean directory so stale files cannot skew the count.
  localFs.delete(baseDir, true);
  FileInputFormat.setInputPaths(conf, baseDir);
  localFs.mkdirs(mergeTmp);

  final LongWritable key = new LongWritable();
  final Text value = new Text();

  final SequenceFile.Writer writer =
      SequenceFile.createWriter(localFs, conf, seqFile, LongWritable.class,
          Text.class, compressionType, new DefaultCodec());
  try {
    // Every record carries the same key and value; only the count matters.
    for (int written = 0; written < RECORDS; written++) {
      key.set(1234);
      value.set("valuevaluevaluevaluevaluevaluevaluevaluevaluevaluevalue");
      writer.append(key, value);
    }
  } finally {
    writer.close();
  }

  final long fileLength = localFs.getFileStatus(seqFile).getLen();
  LOG.info("With compression = " + compressionType + ": "
      + "compressed length = " + fileLength);

  final SequenceFile.Sorter sorter = new SequenceFile.Sorter(localFs,
      conf.getOutputKeyComparator(), conf.getMapOutputKeyClass(),
      conf.getMapOutputValueClass(), conf);
  final RawKeyValueIterator merged =
      sorter.merge(new Path[] { seqFile }, mergeTmp, false);
  int seen = 0;
  while (merged.next()) {
    seen++;
  }
  assertEquals(RECORDS, seen);
  assertEquals(1.0f, merged.getProgress().get());
}
 
Example 9
Source Project: hadoop   Source File: BufferSorter.java    License: Apache License 2.0 2 votes vote down vote up
/**
 * Sorts the buffered key/value records. The framework decides when to
 * actually sort; implementations in this codebase (e.g. MergeSorter) may
 * return null when no records are buffered.
 *
 * @return an iterator over the sorted raw key/value records
 */
public RawKeyValueIterator sort();
 
Example 10
Source Project: big-c   Source File: BufferSorter.java    License: Apache License 2.0 2 votes vote down vote up
/**
 * Sorts the buffered key/value records. The framework decides when to
 * actually sort; implementations in this codebase (e.g. MergeSorter) may
 * return null when no records are buffered.
 *
 * @return an iterator over the sorted raw key/value records
 */
public RawKeyValueIterator sort();
 
Example 11
Source Project: RDFS   Source File: BufferSorter.java    License: Apache License 2.0 2 votes vote down vote up
/**
 * Sorts the buffered key/value records. The framework decides when to
 * actually sort; implementations in this codebase (e.g. MergeSorter) may
 * return null when no records are buffered.
 *
 * @return an iterator over the sorted raw key/value records
 */
public RawKeyValueIterator sort();
 
Example 12
Source Project: hadoop-gpu   Source File: BufferSorter.java    License: Apache License 2.0 2 votes vote down vote up
/**
 * Sorts the buffered key/value records. The framework decides when to
 * actually sort; implementations in this codebase (e.g. MergeSorter) may
 * return null when no records are buffered.
 *
 * @return an iterator over the sorted raw key/value records
 */
public RawKeyValueIterator sort();
 
Example 13
Source Project: hadoop   Source File: BasicTypeSorterBase.java    License: Apache License 2.0
/**
 * Performs the actual sort; concrete subclasses (e.g. MergeSorter) supply
 * the algorithm.
 *
 * @return an iterator over the sorted raw key/value records
 */
public abstract RawKeyValueIterator sort();
Example 14
Source Project: big-c   Source File: BasicTypeSorterBase.java    License: Apache License 2.0
/**
 * Performs the actual sort; concrete subclasses (e.g. MergeSorter) supply
 * the algorithm.
 *
 * @return an iterator over the sorted raw key/value records
 */
public abstract RawKeyValueIterator sort();
Example 15
Source Project: RDFS   Source File: BasicTypeSorterBase.java    License: Apache License 2.0
/**
 * Performs the actual sort; concrete subclasses (e.g. MergeSorter) supply
 * the algorithm.
 *
 * @return an iterator over the sorted raw key/value records
 */
public abstract RawKeyValueIterator sort();
Example 16
Source Project: hadoop-gpu   Source File: BasicTypeSorterBase.java    License: Apache License 2.0
/**
 * Performs the actual sort; concrete subclasses (e.g. MergeSorter) supply
 * the algorithm.
 *
 * @return an iterator over the sorted raw key/value records
 */
public abstract RawKeyValueIterator sort();