org.apache.hadoop.mapred.IFile.Reader Java Examples
The following examples show how to use org.apache.hadoop.mapred.IFile.Reader. They are drawn from several open-source projects (hadoop, big-c, RDFS, and hadoop-gpu); each example lists its source file and license.
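Before the project examples, here is a minimal, self-contained sketch of the typical read loop. It is a sketch only: the class name IFileReaderSketch and the path /tmp/map_0.out are placeholder assumptions, and IFile is an internal MapReduce format, so in practice this pattern appears in framework and test code rather than in user applications.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.IFile;

public class IFileReaderSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    // Placeholder path; point this at a real IFile (e.g. a spilled map output).
    Path path = new Path("/tmp/map_0.out");

    FSDataInputStream in = fs.open(path);
    // codec and readsCounter may be null, as in the examples below:
    // null codec means the data is read uncompressed, and no counter is updated.
    IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(
        conf, in, fs.getFileStatus(path).getLen(), null, null);

    DataInputBuffer keyBuf = new DataInputBuffer();
    DataInputBuffer valBuf = new DataInputBuffer();
    Text key = new Text();
    Text value = new Text();

    // nextRawKey() returns false once the IFile's EOF marker is reached.
    while (reader.nextRawKey(keyBuf)) {
      reader.nextRawValue(valBuf);
      key.readFields(keyBuf);    // deserialize the raw key bytes
      value.readFields(valBuf);  // deserialize the raw value bytes
      System.out.println(key + "\t" + value);
    }
    reader.close();
  }
}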
Example #1
Source File: TestMerger.java From big-c with Apache License 2.0
private Answer<?> getKeyAnswer(final String segmentName,
    final boolean isCompressedInput) {
  return new Answer<Object>() {
    int i = 0;

    @SuppressWarnings("unchecked")
    public Boolean answer(InvocationOnMock invocation) {
      if (i++ == 3) {
        return false;
      }
      Reader<Text,Text> mock = (Reader<Text,Text>) invocation.getMock();
      int multiplier = isCompressedInput ? 100 : 1;
      mock.bytesRead += 10 * multiplier;
      Object[] args = invocation.getArguments();
      DataInputBuffer key = (DataInputBuffer) args[0];
      key.reset(("Segment Key " + segmentName + i).getBytes(), 20);
      return true;
    }
  };
}
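This Mockito Answer simulates a segment containing three records: it fills the caller's DataInputBuffer with a synthetic key and returns true on the first three invocations, then returns false to signal the end of the segment. The bytesRead bump, scaled by 100 for compressed input, lets the test verify the reader's byte accounting.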
Example #2
Source File: TestMerger.java From big-c with Apache License 2.0
private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path,
    List<String> keys, List<String> values) throws IOException {
  FSDataInputStream in = CryptoUtils.wrapIfNecessary(conf, fs.open(path));

  IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in,
      fs.getFileStatus(path).getLen(), null, null);
  DataInputBuffer keyBuff = new DataInputBuffer();
  DataInputBuffer valueBuff = new DataInputBuffer();
  Text key = new Text();
  Text value = new Text();
  while (reader.nextRawKey(keyBuff)) {
    key.readFields(keyBuff);
    keys.add(key.toString());
    reader.nextRawValue(valueBuff);
    value.readFields(valueBuff);
    values.add(value.toString());
  }
}
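Note the read protocol used here: nextRawKey() both advances the stream and signals end of file, each successful key read is paired with a nextRawValue() call before the next key, and the raw bytes are then deserialized with Text.readFields().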
Example #3
Source File: TestMerger.java From hadoop with Apache License 2.0
private void readOnDiskMapOutput(Configuration conf, FileSystem fs, Path path,
    List<String> keys, List<String> values) throws IOException {
  FSDataInputStream in = CryptoUtils.wrapIfNecessary(conf, fs.open(path));

  IFile.Reader<Text, Text> reader = new IFile.Reader<Text, Text>(conf, in,
      fs.getFileStatus(path).getLen(), null, null);
  DataInputBuffer keyBuff = new DataInputBuffer();
  DataInputBuffer valueBuff = new DataInputBuffer();
  Text key = new Text();
  Text value = new Text();
  while (reader.nextRawKey(keyBuff)) {
    key.readFields(keyBuff);
    keys.add(key.toString());
    reader.nextRawValue(valueBuff);
    value.readFields(valueBuff);
    values.add(value.toString());
  }
}
Example #4
Source File: TestMerger.java From hadoop with Apache License 2.0
private Answer<?> getKeyAnswer(final String segmentName,
    final boolean isCompressedInput) {
  return new Answer<Object>() {
    int i = 0;

    @SuppressWarnings("unchecked")
    public Boolean answer(InvocationOnMock invocation) {
      if (i++ == 3) {
        return false;
      }
      Reader<Text,Text> mock = (Reader<Text,Text>) invocation.getMock();
      int multiplier = isCompressedInput ? 100 : 1;
      mock.bytesRead += 10 * multiplier;
      Object[] args = invocation.getArguments();
      DataInputBuffer key = (DataInputBuffer) args[0];
      key.reset(("Segment Key " + segmentName + i).getBytes(), 20);
      return true;
    }
  };
}
Example #5
Source File: Merger.java From big-c with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve,
               Counters.Counter mapOutputsCounter) {
  this.reader = reader;
  this.preserve = preserve;
  this.segmentLength = reader.getLength();
  this.mapOutputsCounter = mapOutputsCounter;
}
Example #6
Source File: Merger.java From hadoop-gpu with Apache License 2.0
private void init(Counters.Counter readsCounter) throws IOException {
  if (reader == null) {
    FSDataInputStream in = fs.open(file);
    in.seek(segmentOffset);
    reader = new Reader<K, V>(conf, in, segmentLength, codec, readsCounter);
  }
}
Example #7
Source File: Merger.java From RDFS with Apache License 2.0
private void init(Counters.Counter readsCounter) throws IOException {
  if (reader == null) {
    FSDataInputStream in = fs.open(file);
    in.seek(segmentOffset);
    reader = new Reader<K, V>(conf, in, segmentLength, codec, readsCounter);
  }
}
Example #8
Source File: TestMerger.java From big-c with Apache License 2.0
@SuppressWarnings("unchecked") private Reader<Text, Text> getReader(int i, boolean isCompressedInput) throws IOException { Reader<Text, Text> readerMock = mock(Reader.class); when(readerMock.getLength()).thenReturn(30l); when(readerMock.getPosition()).thenReturn(0l).thenReturn(10l).thenReturn( 20l); when( readerMock.nextRawKey(any(DataInputBuffer.class))) .thenAnswer(getKeyAnswer("Segment" + i, isCompressedInput)); doAnswer(getValueAnswer("Segment" + i)).when(readerMock).nextRawValue( any(DataInputBuffer.class)); return readerMock; }
Example #9
Source File: BackupStore.java From big-c with Apache License 2.0
/**
 * This method creates a memory segment from the existing buffer
 * @throws IOException
 */
void createInMemorySegment() throws IOException {

  // If nothing was written in this block because the record size
  // was greater than the allocated block size, just return.
  if (usedSize == 0) {
    ramManager.unreserve(blockSize);
    return;
  }

  // spaceAvailable would have ensured that there is enough space
  // left for the EOF markers.
  assert ((blockSize - usedSize) >= EOF_MARKER_SIZE);

  WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER);
  WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER);
  usedSize += EOF_MARKER_SIZE;

  ramManager.unreserve(blockSize - usedSize);

  Reader<K, V> reader =
      new org.apache.hadoop.mapreduce.task.reduce.InMemoryReader<K, V>(null,
          (org.apache.hadoop.mapred.TaskAttemptID) tid,
          dataOut.getData(), 0, usedSize, conf);
  Segment<K, V> segment = new Segment<K, V>(reader, false);
  segmentList.add(segment);
  LOG.debug("Added Memory Segment to List. List Size is "
      + segmentList.size());
}
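The two EOF_MARKER vints terminate the in-memory buffer the same way an on-disk IFile is terminated, which is what allows the buffer to be wrapped in an InMemoryReader and merged like any other segment.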
Example #10
Source File: Merger.java From big-c with Apache License 2.0
void init(Counters.Counter readsCounter) throws IOException {
  if (reader == null) {
    FSDataInputStream in = fs.open(file);
    in.seek(segmentOffset);
    in = CryptoUtils.wrapIfNecessary(conf, in);
    reader = new Reader<K, V>(conf, in,
        segmentLength - CryptoUtils.cryptoPadding(conf),
        codec, readsCounter);
  }
  if (mapOutputsCounter != null) {
    mapOutputsCounter.increment(1);
  }
}
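Unlike the older init() implementations in Examples #6 and #7, this version supports encrypted intermediate data: the input stream is wrapped by CryptoUtils.wrapIfNecessary() and the crypto padding is subtracted from the segment length before the Reader is constructed.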
Example #11
Source File: TestMerger.java From hadoop with Apache License 2.0
@SuppressWarnings("unchecked") private Reader<Text, Text> getReader(int i, boolean isCompressedInput) throws IOException { Reader<Text, Text> readerMock = mock(Reader.class); when(readerMock.getLength()).thenReturn(30l); when(readerMock.getPosition()).thenReturn(0l).thenReturn(10l).thenReturn( 20l); when( readerMock.nextRawKey(any(DataInputBuffer.class))) .thenAnswer(getKeyAnswer("Segment" + i, isCompressedInput)); doAnswer(getValueAnswer("Segment" + i)).when(readerMock).nextRawValue( any(DataInputBuffer.class)); return readerMock; }
Example #12
Source File: BackupStore.java From hadoop with Apache License 2.0
/**
 * This method creates a memory segment from the existing buffer
 * @throws IOException
 */
void createInMemorySegment() throws IOException {

  // If nothing was written in this block because the record size
  // was greater than the allocated block size, just return.
  if (usedSize == 0) {
    ramManager.unreserve(blockSize);
    return;
  }

  // spaceAvailable would have ensured that there is enough space
  // left for the EOF markers.
  assert ((blockSize - usedSize) >= EOF_MARKER_SIZE);

  WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER);
  WritableUtils.writeVInt(dataOut, IFile.EOF_MARKER);
  usedSize += EOF_MARKER_SIZE;

  ramManager.unreserve(blockSize - usedSize);

  Reader<K, V> reader =
      new org.apache.hadoop.mapreduce.task.reduce.InMemoryReader<K, V>(null,
          (org.apache.hadoop.mapred.TaskAttemptID) tid,
          dataOut.getData(), 0, usedSize, conf);
  Segment<K, V> segment = new Segment<K, V>(reader, false);
  segmentList.add(segment);
  LOG.debug("Added Memory Segment to List. List Size is "
      + segmentList.size());
}
Example #13
Source File: Merger.java From hadoop with Apache License 2.0
void init(Counters.Counter readsCounter) throws IOException {
  if (reader == null) {
    FSDataInputStream in = fs.open(file);
    in.seek(segmentOffset);
    in = CryptoUtils.wrapIfNecessary(conf, in);
    reader = new Reader<K, V>(conf, in,
        segmentLength - CryptoUtils.cryptoPadding(conf),
        codec, readsCounter);
  }
  if (mapOutputsCounter != null) {
    mapOutputsCounter.increment(1);
  }
}
Example #14
Source File: Merger.java From hadoop with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve,
               Counters.Counter mapOutputsCounter) {
  this.reader = reader;
  this.preserve = preserve;
  this.segmentLength = reader.getLength();
  this.mapOutputsCounter = mapOutputsCounter;
}
Example #15
Source File: Merger.java From big-c with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve, long rawDataLength) {
  this(reader, preserve, null);
  this.rawDataLength = rawDataLength;
}
Example #16
Source File: Merger.java From hadoop with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve) {
  this(reader, preserve, null);
}
Example #17
Source File: Merger.java From big-c with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve) {
  this(reader, preserve, null);
}
Example #18
Source File: Merger.java From big-c with Apache License 2.0
Reader<K,V> getReader() {
  return reader;
}
Example #19
Source File: Merger.java From hadoop with Apache License 2.0
Reader<K,V> getReader() {
  return reader;
}
Example #20
Source File: Merger.java From RDFS with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve) {
  this.reader = reader;
  this.preserve = preserve;
  this.segmentLength = reader.getLength();
}
Example #21
Source File: Merger.java From hadoop-gpu with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve) {
  this.reader = reader;
  this.preserve = preserve;
  this.segmentLength = reader.getLength();
}
Example #22
Source File: Merger.java From hadoop with Apache License 2.0
public Segment(Reader<K, V> reader, boolean preserve, long rawDataLength) {
  this(reader, preserve, null);
  this.rawDataLength = rawDataLength;
}