org.apache.hadoop.io.DataOutputBuffer Java Examples
The following examples show how to use org.apache.hadoop.io.DataOutputBuffer.
DataOutputBuffer is a reusable, in-memory DataOutput implementation: bytes written to it accumulate in an internal array exposed via getData() and bounded by getLength(), and reset() rewinds the buffer for reuse. Because getData() returns the backing array itself, which is typically larger than the valid data, it should always be paired with getLength(). Each example is drawn from the open-source project named in its header.
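Before the project examples, here is a minimal sketch of the typical round trip: write a Writable into a DataOutputBuffer, then read it back through a DataInputBuffer. The Hadoop classes and methods are real; the harness class and its names are illustrative.

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

public class DataOutputBufferRoundTrip {
  public static void main(String[] args) throws Exception {
    // Serialize a Writable into the in-memory buffer.
    DataOutputBuffer out = new DataOutputBuffer();
    new Text("hello").write(out);

    // getData() exposes the backing array; getLength() bounds the valid bytes.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());

    Text copy = new Text();
    copy.readFields(in);
    System.out.println(copy); // prints: hello

    // reset() rewinds the buffer so it can be reused for the next record.
    out.reset();
  }
}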
Example #1
Source File: CryptoStreamsTestBase.java from big-c, Apache License 2.0
@Before
public void setUp() throws IOException {
  // Generate data
  final int seed = new Random().nextInt();
  final DataOutputBuffer dataBuf = new DataOutputBuffer();
  final RandomDatum.Generator generator = new RandomDatum.Generator(seed);
  for (int i = 0; i < count; ++i) {
    generator.next();
    final RandomDatum key = generator.getKey();
    final RandomDatum value = generator.getValue();

    key.write(dataBuf);
    value.write(dataBuf);
  }
  LOG.info("Generated " + count + " records");

  data = dataBuf.getData();
  dataLen = dataBuf.getLength();
}
Example #2
Source File: TestCryptoStreams.java from big-c, Apache License 2.0
@Override
protected OutputStream getOutputStream(int bufferSize, byte[] key, byte[] iv)
    throws IOException {
  DataOutputBuffer out = new DataOutputBuffer() {
    @Override
    public void flush() throws IOException {
      buf = getData();
      bufLen = getLength();
    }
    @Override
    public void close() throws IOException {
      buf = getData();
      bufLen = getLength();
    }
  };
  return new CryptoOutputStream(new FakeOutputStream(out), codec, bufferSize,
      key, iv);
}
Example #3
Source File: HFilesystemAdmin.java from spliceengine, GNU Affero General Public License v3.0
private static byte[] toByteArray(Writable... writables) {
  final DataOutputBuffer out = new DataOutputBuffer();
  try {
    for (Writable w : writables) {
      w.write(out);
    }
    out.close();
  } catch (IOException e) {
    throw new RuntimeException("Fail to convert writables to a byte array", e);
  }
  byte[] bytes = out.getData();
  if (bytes.length == out.getLength()) {
    return bytes;
  }
  byte[] result = new byte[out.getLength()];
  System.arraycopy(bytes, 0, result, 0, out.getLength());
  return result;
}
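The trailing copy is needed because getData() returns the buffer's backing array, which may be longer than the valid data. A complementary deserializer can be sketched with DataInputBuffer; the helper name fromByteArray and its contract (same Writable order as toByteArray) are assumptions, not part of the original class.

// Hypothetical inverse of toByteArray(): populates the given Writables
// from a byte array produced by it, in the same order.
private static void fromByteArray(byte[] bytes, Writable... writables)
    throws IOException {
  final DataInputBuffer in = new DataInputBuffer();
  in.reset(bytes, bytes.length);
  for (Writable w : writables) {
    w.readFields(in);
  }
}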
Example #4
Source File: StreamXmlRecordReader.java from RDFS, Apache License 2.0
public synchronized boolean next(WritableComparable key, Writable value)
    throws IOException {
  numNext++;
  if (pos_ >= end_) {
    return false;
  }

  DataOutputBuffer buf = new DataOutputBuffer();
  if (!readUntilMatchBegin()) {
    return false;
  }
  if (!readUntilMatchEnd(buf)) {
    return false;
  }

  // There is only one elem..key/value splitting is not done here.
  byte[] record = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, record, 0, record.length);

  numRecStats(record, 0, record.length);

  ((Text) key).set(record);
  ((Text) value).set("");

  return true;
}
Example #5
Source File: StreamXmlRecordReader.java from big-c, Apache License 2.0
public synchronized boolean next(Text key, Text value) throws IOException {
  numNext++;
  if (pos_ >= end_) {
    return false;
  }

  DataOutputBuffer buf = new DataOutputBuffer();
  if (!readUntilMatchBegin()) {
    return false;
  }
  if (pos_ >= end_ || !readUntilMatchEnd(buf)) {
    return false;
  }

  // There is only one elem..key/value splitting is not done here.
  byte[] record = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, record, 0, record.length);

  numRecStats(record, 0, record.length);

  key.set(record);
  value.set("");

  return true;
}
Example #6
Source File: TestMerge.java from hadoop, Apache License 2.0
public KeyValueWriter(Configuration conf, OutputStream output,
                      Class<K> kyClass, Class<V> valClass) throws IOException {
  keyClass = kyClass;
  valueClass = valClass;
  dataBuffer = new DataOutputBuffer();
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  keySerializer = (Serializer<K>) serializationFactory.getSerializer(keyClass);
  keySerializer.open(dataBuffer);
  valueSerializer =
      (Serializer<V>) serializationFactory.getSerializer(valueClass);
  valueSerializer.open(dataBuffer);
  outputStream = new DataOutputStream(output);
}
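The constructor opens both serializers over the one shared dataBuffer. A hypothetical companion write method, sketched here to show how such a buffer is typically drained to the wrapped stream; the length-prefix framing is an assumption, not the class's actual record format.

// Hypothetical companion method: serialize one key/value pair through the
// shared dataBuffer, then flush the raw bytes to the underlying stream.
public void write(K key, V value) throws IOException {
  dataBuffer.reset();
  keySerializer.serialize(key);
  valueSerializer.serialize(value);
  outputStream.writeInt(dataBuffer.getLength());
  outputStream.write(dataBuffer.getData(), 0, dataBuffer.getLength());
}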
Example #7
Source File: TestIFileStreams.java from big-c, Apache License 2.0
public void testIFileStream() throws Exception {
  final int DLEN = 100;
  DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
  IFileOutputStream ifos = new IFileOutputStream(dob);
  for (int i = 0; i < DLEN; ++i) {
    ifos.write(i);
  }
  ifos.close();
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), DLEN + 4);
  IFileInputStream ifis = new IFileInputStream(dib, 104, new Configuration());
  for (int i = 0; i < DLEN; ++i) {
    assertEquals(i, ifis.read());
  }
  ifis.close();
}
Example #8
Source File: Utils.java from stratosphere, Apache License 2.0
public static void setTokensFor(ContainerLaunchContext amContainer,
    Path[] paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();

  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());

  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  amContainer.setTokens(securityTokens);
}
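On the receiving side, tokens packed this way are conventionally recovered with Credentials.readTokenStorageStream, since DataInputBuffer extends DataInputStream. A minimal sketch; containerTokens is a hypothetical stand-in for the array-backed ByteBuffer set above.

// Sketch: recover the Credentials from a container's security-token buffer.
ByteBuffer containerTokens = ...; // hypothetical: the buffer set via setTokens()
DataInputBuffer dib = new DataInputBuffer();
dib.reset(containerTokens.array(), containerTokens.limit());
Credentials credentials = new Credentials();
credentials.readTokenStorageStream(dib);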
Example #9
Source File: EditLogBackupOutputStream.java from hadoop, Apache License 2.0
EditLogBackupOutputStream(NamenodeRegistration bnReg, // backup node
                          JournalInfo journalInfo) // active name-node
    throws IOException {
  super();
  this.bnRegistration = bnReg;
  this.journalInfo = journalInfo;
  InetSocketAddress bnAddress =
      NetUtils.createSocketAddr(bnRegistration.getAddress());
  try {
    this.backupNode = NameNodeProxies.createNonHAProxy(new HdfsConfiguration(),
        bnAddress, JournalProtocol.class,
        UserGroupInformation.getCurrentUser(), true).getProxy();
  } catch (IOException e) {
    Storage.LOG.error("Error connecting to: " + bnAddress, e);
    throw e;
  }
  this.doubleBuf = new EditsDoubleBuffer(DEFAULT_BUFFER_SIZE);
  this.out = new DataOutputBuffer(DEFAULT_BUFFER_SIZE);
}
Example #10
Source File: Utils.java from flink, Apache License 2.0
public static void setTokensFor(ContainerLaunchContext amContainer,
    List<Path> paths, Configuration conf) throws IOException {
  Credentials credentials = new Credentials();
  // for HDFS
  TokenCache.obtainTokensForNamenodes(credentials, paths.toArray(new Path[0]),
      conf);
  // for HBase
  obtainTokenForHBase(credentials, conf);
  // for user
  UserGroupInformation currUsr = UserGroupInformation.getCurrentUser();

  Collection<Token<? extends TokenIdentifier>> usrTok = currUsr.getTokens();
  for (Token<? extends TokenIdentifier> token : usrTok) {
    final Text id = new Text(token.getIdentifier());
    LOG.info("Adding user token " + id + " with " + token);
    credentials.addToken(id, token);
  }
  try (DataOutputBuffer dob = new DataOutputBuffer()) {
    credentials.writeTokenStorageToStream(dob);

    if (LOG.isDebugEnabled()) {
      LOG.debug("Wrote tokens. Credentials buffer length: " + dob.getLength());
    }

    ByteBuffer securityTokens =
        ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    amContainer.setTokens(securityTokens);
  }
}
Example #11
Source File: TestWritableSerialization.java from hadoop, Apache License 2.0
@Test
@SuppressWarnings({"rawtypes", "unchecked"})
public void testWritableComparatorJavaSerialization() throws Exception {
  Serialization ser = new JavaSerialization();
  Serializer<TestWC> serializer = ser.getSerializer(TestWC.class);
  DataOutputBuffer dob = new DataOutputBuffer();
  serializer.open(dob);
  TestWC orig = new TestWC(0);
  serializer.serialize(orig);
  serializer.close();
  Deserializer<TestWC> deserializer = ser.getDeserializer(TestWC.class);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(dob.getData(), 0, dob.getLength());
  deserializer.open(dib);
  TestWC deser = deserializer.deserialize(null);
  deserializer.close();
  assertEquals(orig, deser);
}
Example #12
Source File: TestPBRecordImpl.java from hadoop, Apache License 2.0
@Test(timeout = 10000)
public void testLocalizerStatusSerDe() throws Exception {
  LocalizerStatus rsrcS = createLocalizerStatus();
  assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
  LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
  DataOutputBuffer out = new DataOutputBuffer();
  rsrcPb.getProto().writeDelimitedTo(out);
  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), 0, out.getLength());
  LocalizerStatusProto rsrcPbD = LocalizerStatusProto.parseDelimitedFrom(in);
  assertNotNull(rsrcPbD);
  LocalizerStatus rsrcD = new LocalizerStatusPBImpl(rsrcPbD);

  assertEquals(rsrcS, rsrcD);
  assertEquals("localizer0", rsrcS.getLocalizerId());
  assertEquals("localizer0", rsrcD.getLocalizerId());
  assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
  assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
Example #13
Source File: TestWritableJobConf.java from RDFS, Apache License 2.0
private <K> K serDeser(K conf) throws Exception {
  SerializationFactory factory = new SerializationFactory(CONF);
  Serializer<K> serializer =
      factory.getSerializer(GenericsUtil.getClass(conf));
  Deserializer<K> deserializer =
      factory.getDeserializer(GenericsUtil.getClass(conf));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(conf);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
Example #14
Source File: TestMerge.java from big-c, Apache License 2.0
public KeyValueWriter(Configuration conf, OutputStream output,
                      Class<K> kyClass, Class<V> valClass) throws IOException {
  keyClass = kyClass;
  valueClass = valClass;
  dataBuffer = new DataOutputBuffer();
  SerializationFactory serializationFactory = new SerializationFactory(conf);
  keySerializer = (Serializer<K>) serializationFactory.getSerializer(keyClass);
  keySerializer.open(dataBuffer);
  valueSerializer =
      (Serializer<V>) serializationFactory.getSerializer(valueClass);
  valueSerializer.open(dataBuffer);
  outputStream = new DataOutputStream(output);
}
Example #15
Source File: SequenceFile.java from gemfirexd-oss, Apache License 2.0
/** @deprecated Call {@link #nextRaw(DataOutputBuffer,SequenceFile.ValueBytes)}. */
@Deprecated
synchronized int next(DataOutputBuffer buffer) throws IOException {
  // Unsupported for block-compressed sequence files
  if (blockCompressed) {
    throw new IOException("Unsupported call for block-compressed" +
        " SequenceFiles - use SequenceFile.Reader.next(DataOutputStream, ValueBytes)");
  }
  try {
    int length = readRecordLength();
    if (length == -1) {
      return -1;
    }
    int keyLength = in.readInt();
    buffer.write(in, length);
    return keyLength;
  } catch (ChecksumException e) { // checksum failure
    handleChecksumException(e);
    return next(buffer);
  }
}
Example #16
Source File: Chain.java from big-c, Apache License 2.0
private <E> E makeCopyForPassByValue(Serialization<E> serialization, E obj)
    throws IOException {
  Serializer<E> ser = serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
      serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
      getChainJobConf());
  ByteArrayInputStream bais =
      new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
Example #17
Source File: TestDelegationTokenRemoteFetcher.java from big-c, Apache License 2.0
@Override
public void handle(Channel channel, Token<DelegationTokenIdentifier> token,
    String serviceUrl) throws IOException {
  Assert.assertEquals(testToken, token);

  Credentials creds = new Credentials();
  creds.addToken(new Text(serviceUrl), token);
  DataOutputBuffer out = new DataOutputBuffer();
  creds.write(out);
  int fileLength = out.getData().length;
  ChannelBuffer cbuffer = ChannelBuffers.buffer(fileLength);
  cbuffer.writeBytes(out.getData());
  HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
  response.setHeader(HttpHeaders.Names.CONTENT_LENGTH,
      String.valueOf(fileLength));
  response.setContent(cbuffer);
  channel.write(response).addListener(ChannelFutureListener.CLOSE);
}
Example #18
Source File: TestJspHelper.java from hadoop, Apache License 2.0
@Test
public void testReadWriteReplicaState() {
  try {
    DataOutputBuffer out = new DataOutputBuffer();
    DataInputBuffer in = new DataInputBuffer();
    for (HdfsServerConstants.ReplicaState repState :
        HdfsServerConstants.ReplicaState.values()) {
      repState.write(out);
      in.reset(out.getData(), out.getLength());
      HdfsServerConstants.ReplicaState result =
          HdfsServerConstants.ReplicaState.read(in);
      assertTrue("testReadWrite error !!!", repState == result);
      out.reset();
      in.reset();
    }
  } catch (Exception ex) {
    fail("testReadWrite ex error ReplicaState");
  }
}
Example #19
Source File: SequenceFile.java from gemfirexd-oss, Apache License 2.0
/** Read a compressed buffer */
private synchronized void readBuffer(DataInputBuffer buffer,
    CompressionInputStream filter) throws IOException {
  // Read data into a temporary buffer
  DataOutputBuffer dataBuffer = new DataOutputBuffer();

  try {
    int dataBufferLength = WritableUtils.readVInt(in);
    dataBuffer.write(in, dataBufferLength);

    // Set up 'buffer' connected to the input-stream
    buffer.reset(dataBuffer.getData(), 0, dataBuffer.getLength());
  } finally {
    dataBuffer.close();
  }

  // Reset the codec
  filter.resetState();
}
Example #20
Source File: StreamXmlRecordReader.java from hadoop, Apache License 2.0
public synchronized boolean next(Text key, Text value) throws IOException {
  numNext++;
  if (pos_ >= end_) {
    return false;
  }

  DataOutputBuffer buf = new DataOutputBuffer();
  if (!readUntilMatchBegin()) {
    return false;
  }
  if (pos_ >= end_ || !readUntilMatchEnd(buf)) {
    return false;
  }

  // There is only one elem..key/value splitting is not done here.
  byte[] record = new byte[buf.getLength()];
  System.arraycopy(buf.getData(), 0, record, 0, record.length);

  numRecStats(record, 0, record.length);

  key.set(record);
  value.set("");

  return true;
}
Example #21
Source File: QueryWritableTest.java from incubator-retired-blur, Apache License 2.0
@Test
public void testTermQuery() throws IOException {
  TermQuery query = new TermQuery(new Term("field", "value"));
  QueryWritable queryWritable = new QueryWritable();
  queryWritable.setQuery(query);
  DataOutputBuffer out = new DataOutputBuffer();
  queryWritable.write(out);
  byte[] data = out.getData();
  int length = out.getLength();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(data, length);
  QueryWritable newQueryWritable = new QueryWritable();
  newQueryWritable.readFields(in);

  Query termQuery = newQueryWritable.getQuery();

  assertEquals(query, termQuery);
}
Example #22
Source File: Chain.java from RDFS, Apache License 2.0
private <E> E makeCopyForPassByValue(Serialization<E> serialization, E obj)
    throws IOException {
  Serializer<E> ser = serialization.getSerializer(GenericsUtil.getClass(obj));
  Deserializer<E> deser =
      serialization.getDeserializer(GenericsUtil.getClass(obj));

  DataOutputBuffer dof = threadLocalDataOutputBuffer.get();

  dof.reset();
  ser.open(dof);
  ser.serialize(obj);
  ser.close();
  obj = ReflectionUtils.newInstance(GenericsUtil.getClass(obj),
      getChainJobConf());
  ByteArrayInputStream bais =
      new ByteArrayInputStream(dof.getData(), 0, dof.getLength());
  deser.open(bais);
  deser.deserialize(obj);
  deser.close();
  return obj;
}
Example #23
Source File: SerializationTestUtil.java from hadoop, Apache License 2.0
/**
 * A utility that tests serialization/deserialization.
 * @param conf configuration to use, "io.serializations" is read to
 * determine the serialization
 * @param <K> the class of the item
 * @param before item to (de)serialize
 * @return deserialized item
 */
public static <K> K testSerialization(Configuration conf, K before)
    throws Exception {
  SerializationFactory factory = new SerializationFactory(conf);
  Serializer<K> serializer =
      factory.getSerializer(GenericsUtil.getClass(before));
  Deserializer<K> deserializer =
      factory.getDeserializer(GenericsUtil.getClass(before));

  DataOutputBuffer out = new DataOutputBuffer();
  serializer.open(out);
  serializer.serialize(before);
  serializer.close();

  DataInputBuffer in = new DataInputBuffer();
  in.reset(out.getData(), out.getLength());
  deserializer.open(in);
  K after = deserializer.deserialize(null);
  deserializer.close();
  return after;
}
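A hypothetical call site, round-tripping a Text through whatever serialization the configuration selects; the assertion framing is illustrative, not part of the utility.

Configuration conf = new Configuration();
Text before = new Text("round trip");
Text after = SerializationTestUtil.testSerialization(conf, before);
assertEquals(before, after);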
Example #24
Source File: EditsDoubleBuffer.java from big-c, Apache License 2.0
@Override
public DataOutputBuffer reset() {
  super.reset();
  firstTxId = HdfsConstants.INVALID_TXID;
  numTxns = 0;
  return this;
}
Example #25
Source File: XMLParser.java from incubator-retired-mrql, Apache License 2.0
public void open(String file) {
  try {
    splitter = new XMLSplitter(tags, file, new DataOutputBuffer());
  } catch (Exception e) {
    throw new Error(e);
  }
}
Example #26
Source File: FlinkXMLSplitter.java from incubator-retired-mrql, Apache License 2.0
FlinkXMLSplitter(String[] tags, String file) {
  in_memory = true;
  try {
    in = new BufferedReader(new InputStreamReader(new FileInputStream(file)),
        100000);
  } catch (Exception e) {
    throw new Error("Cannot open the file: " + file);
  }
  this.tags = tags;
  this.buffer = new DataOutputBuffer();
}
Example #27
Source File: EditsDoubleBuffer.java from RDFS, Apache License 2.0
@Override
public DataOutputBuffer reset() {
  super.reset();
  firstTxId = HdfsConstants.INVALID_TXID;
  numTxns = 0;
  return this;
}
Example #28
Source File: TestCodec.java from big-c, Apache License 2.0
void GzipConcatTest(Configuration conf,
    Class<? extends Decompressor> decomClass) throws IOException {
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);
  LOG.info(decomClass + " seed: " + seed);

  final int CONCAT = r.nextInt(4) + 3;
  final int BUFLEN = 128 * 1024;
  DataOutputBuffer dflbuf = new DataOutputBuffer();
  DataOutputBuffer chkbuf = new DataOutputBuffer();
  byte[] b = new byte[BUFLEN];
  for (int i = 0; i < CONCAT; ++i) {
    GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
    r.nextBytes(b);
    int len = r.nextInt(BUFLEN);
    int off = r.nextInt(BUFLEN - len);
    chkbuf.write(b, off, len);
    gzout.write(b, off, len);
    gzout.close();
  }
  final byte[] chk = Arrays.copyOf(chkbuf.getData(), chkbuf.getLength());

  CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
  Decompressor decom = codec.createDecompressor();
  assertNotNull(decom);
  assertEquals(decomClass, decom.getClass());
  DataInputBuffer gzbuf = new DataInputBuffer();
  gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
  InputStream gzin = codec.createInputStream(gzbuf, decom);

  dflbuf.reset();
  IOUtils.copyBytes(gzin, dflbuf, 4096);
  final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
  assertArrayEquals(chk, dflchk);
}
Example #29
Source File: TezCommonUtils.java from tez, Apache License 2.0
public static ByteBuffer convertJobTokenToBytes(
    Token<JobTokenIdentifier> jobToken) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  jobToken.write(dob);
  ByteBuffer bb = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  return bb;
}
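The reverse conversion can be sketched with DataInputBuffer and Token.readFields; the method name convertBytesToJobToken is an assumption, not part of TezCommonUtils, and the buffer is assumed to be array-backed.

// Hypothetical inverse: rebuild a job token from an array-backed ByteBuffer.
public static Token<JobTokenIdentifier> convertBytesToJobToken(ByteBuffer bb)
    throws IOException {
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining());
  Token<JobTokenIdentifier> token = new Token<JobTokenIdentifier>();
  token.readFields(dib);
  return token;
}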
Example #30
Source File: StreamXmlRecordReader.java from RDFS, Apache License 2.0
private boolean readUntilMatchEnd(DataOutputBuffer buf) throws IOException {
  if (slowMatch_) {
    return slowReadUntilMatch(endPat_, true, buf);
  } else {
    return fastReadUntilMatch(endMark_, true, buf);
  }
}