Java Code Examples for org.apache.hadoop.mapreduce.RecordWriter#write()
The following examples show how to use
org.apache.hadoop.mapreduce.RecordWriter#write().
The examples are extracted from open source projects; the source project, file, and license are noted above each example.
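Most of the examples below follow the same basic pattern: obtain a RecordWriter from an OutputFormat for a given TaskAttemptContext, call write(key, value) once per record, and close the writer in a finally block so output is flushed even if a write fails. Here is a minimal sketch of that pattern using TextOutputFormat; the output path and record values are illustrative only and are not taken from any project below.

// Minimal usage sketch of RecordWriter#write() (illustrative values).
Job job = Job.getInstance(new Configuration());
FileOutputFormat.setOutputPath(job, new Path("/tmp/recordwriter-demo"));
TaskAttemptContext context =
    new TaskAttemptContextImpl(job.getConfiguration(), new TaskAttemptID());
TextOutputFormat<LongWritable, Text> outputFormat =
    new TextOutputFormat<LongWritable, Text>();
RecordWriter<LongWritable, Text> writer = outputFormat.getRecordWriter(context);
try {
  writer.write(new LongWritable(1L), new Text("first record"));
  writer.write(new LongWritable(2L), new Text("second record"));
} finally {
  // close(TaskAttemptContext) flushes buffered records and releases the file.
  writer.close(context);
}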
Example 1
Source Project: mnemonic File: MneMapreducePersonDataTest.java License: Apache License 2.0
@Test(enabled = true)
public void testWritePersonData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Person<Long>> sess =
      new MneDurableOutputSession<Person<Long>>(m_tacontext, null,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Person<Long>> mdvalue =
      new MneDurableOutputValue<Person<Long>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Person<Long>>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<Person<Long>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Person<Long>>> writer =
      outputFormat.getRecordWriter(m_tacontext);
  Person<Long> person = null;
  for (int i = 0; i < m_reccnt; ++i) {
    person = sess.newDurableObjectRecord();
    person.setAge((short) m_rand.nextInt(50));
    person.setName(String.format("Name: [%s]", Utils.genRandomString()), true);
    m_sumage += person.getAge();
    writer.write(nada, mdvalue.of(person));
  }
  writer.close(m_tacontext);
  sess.close();
}
Example 2
Source Project: mnemonic File: MneMapreduceLongDataTest.java License: Apache License 2.0
@Test(enabled = true)
public void testWriteLongData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<Long> sess =
      new MneDurableOutputSession<Long>(m_tacontext, null,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<Long> mdvalue =
      new MneDurableOutputValue<Long>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<Long>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<Long>>();
  RecordWriter<NullWritable, MneDurableOutputValue<Long>> writer =
      outputFormat.getRecordWriter(m_tacontext);
  Long val = null;
  for (int i = 0; i < m_reccnt; ++i) {
    val = m_rand.nextLong();
    m_sum += val;
    writer.write(nada, mdvalue.of(val));
  }
  writer.close(m_tacontext);
  sess.close();
}
Example 3
Source Project: datawave File: SafeFileOutputCommitterTest.java License: Apache License 2.0
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
    throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
Example 4
Source Project: big-c File: TestFileOutputCommitter.java License: Apache License 2.0
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
    throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
Example 5
Source Project: big-c File: TestMRCJCFileOutputCommitter.java License: Apache License 2.0
@SuppressWarnings("unchecked")
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
    throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
Example 6
Source Project: mnemonic File: MneMapreduceBufferDataTest.java License: Apache License 2.0
@Test(enabled = true)
public void testWriteBufferData() throws Exception {
  NullWritable nada = NullWritable.get();
  MneDurableOutputSession<DurableBuffer<?>> sess =
      new MneDurableOutputSession<DurableBuffer<?>>(m_tacontext, null,
          MneConfigHelper.DEFAULT_OUTPUT_CONFIG_PREFIX);
  MneDurableOutputValue<DurableBuffer<?>> mdvalue =
      new MneDurableOutputValue<DurableBuffer<?>>(sess);
  OutputFormat<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> outputFormat =
      new MneOutputFormat<MneDurableOutputValue<DurableBuffer<?>>>();
  RecordWriter<NullWritable, MneDurableOutputValue<DurableBuffer<?>>> writer =
      outputFormat.getRecordWriter(m_tacontext);
  DurableBuffer<?> dbuf = null;
  Checksum cs = new CRC32();
  cs.reset();
  for (int i = 0; i < m_reccnt; ++i) {
    dbuf = genupdDurableBuffer(sess, cs);
    Assert.assertNotNull(dbuf);
    writer.write(nada, mdvalue.of(dbuf));
  }
  m_checksum = cs.getValue();
  writer.close(m_tacontext);
  sess.close();
}
Example 7
Source Project: hadoop File: TestFileOutputCommitter.java License: Apache License 2.0
private void writeOutput(RecordWriter theRecordWriter, TaskAttemptContext context)
    throws IOException, InterruptedException {
  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(context);
  }
}
Example 8
Source Project: beam File: HadoopFormatIO.java License: Apache License 2.0
/**
 * Writes one {@link KV} pair for the given {@link TaskID}.
 *
 * @param kv pair to write
 * @param taskContext taskContext
 */
private void write(KV<KeyT, ValueT> kv, TaskContext<KeyT, ValueT> taskContext) {
  try {
    RecordWriter<KeyT, ValueT> recordWriter = taskContext.getRecordWriter();
    recordWriter.write(kv.getKey(), kv.getValue());
  } catch (Exception e) {
    processTaskException(taskContext, e);
  }
}
Example 9
Source Project: big-c File: TestRecovery.java License: Apache License 2.0
private void writeBadOutput(TaskAttempt attempt, Configuration conf)
    throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
      TypeConverter.fromYarn(attempt.getID()));

  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);

  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key2, val2);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val2);
    theRecordWriter.write(nullWritable, val1);
    theRecordWriter.write(key1, nullWritable);
    theRecordWriter.write(key2, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key1, val1);
  } finally {
    theRecordWriter.close(tContext);
  }

  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
Example 10
Source Project: big-c File: TestRecovery.java License: Apache License 2.0
private void writeOutput(TaskAttempt attempt, Configuration conf)
    throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
      TypeConverter.fromYarn(attempt.getID()));

  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);

  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(tContext);
  }

  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
Example 11
Source Project: 163-bigdate-note File: LogOutputFormat.java License: GNU General Public License v3.0
public void write(K key, V value) throws IOException, InterruptedException {
  TaskID taskID = job.getTaskAttemptID().getTaskID();
  int partition = taskID.getId();
  String baseName = getFileBaseName(key, NUMBER_FORMAT.format(partition));
  RecordWriter<K, V> rw = this.recordWriter.get(baseName);
  if (rw == null) {
    rw = getBaseRecordWriter(job, baseName);
    this.recordWriter.put(baseName, rw);
  }
  rw.write(null, value);
}
Example 12
Source Project: big-c File: TestFileOutputCommitter.java License: Apache License 2.0
private void writeMapFileOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  try {
    int key = 0;
    for (int i = 0; i < 10; ++i) {
      key = i;
      Text val = (i % 2 == 1) ? val1 : val2;
      theRecordWriter.write(new LongWritable(key), val);
    }
  } finally {
    theRecordWriter.close(context);
  }
}
Example 13
Source Project: hadoop File: TestRecovery.java License: Apache License 2.0
private void writeOutput(TaskAttempt attempt, Configuration conf)
    throws Exception {
  TaskAttemptContext tContext = new TaskAttemptContextImpl(conf,
      TypeConverter.fromYarn(attempt.getID()));

  TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat();
  RecordWriter theRecordWriter = theOutputFormat.getRecordWriter(tContext);

  NullWritable nullWritable = NullWritable.get();
  try {
    theRecordWriter.write(key1, val1);
    theRecordWriter.write(null, nullWritable);
    theRecordWriter.write(null, val1);
    theRecordWriter.write(nullWritable, val2);
    theRecordWriter.write(key2, nullWritable);
    theRecordWriter.write(key1, null);
    theRecordWriter.write(null, null);
    theRecordWriter.write(key2, val2);
  } finally {
    theRecordWriter.close(tContext);
  }

  OutputFormat outputFormat = ReflectionUtils.newInstance(
      tContext.getOutputFormatClass(), conf);
  OutputCommitter committer = outputFormat.getOutputCommitter(tContext);
  committer.commitTask(tContext);
}
Example 14
Source Project: hadoop File: TestFileOutputCommitter.java License: Apache License 2.0
private void writeMapFileOutput(RecordWriter theRecordWriter,
    TaskAttemptContext context) throws IOException, InterruptedException {
  try {
    int key = 0;
    for (int i = 0; i < 10; ++i) {
      key = i;
      Text val = (i % 2 == 1) ? val1 : val2;
      theRecordWriter.write(new LongWritable(key), val);
    }
  } finally {
    theRecordWriter.close(context);
  }
}
Example 15
Source Project: datawave File: MultiRFileOutputFormatterTest.java License: Apache License 2.0
private void writeShardEntry(RecordWriter<BulkIngestKey, Value> writer, int shardId)
    throws IOException, InterruptedException {
  writer.write(new BulkIngestKey(new Text(TableName.SHARD),
      new Key("20100101_" + shardId, "bla", "bla")), new Value(new byte[0]));
}
Example 16
Source Project: big-c File: TestMRSequenceFileAsBinaryOutputFormat.java License: Apache License 2.0
public void testBinary() throws IOException, InterruptedException {
  Configuration conf = new Configuration();
  Job job = Job.getInstance(conf);

  Path outdir = new Path(System.getProperty("test.build.data", "/tmp"), "outseq");
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);

  FileOutputFormat.setOutputPath(job, outdir);

  SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job,
      IntWritable.class);
  SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job,
      DoubleWritable.class);

  SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
  SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job,
      CompressionType.BLOCK);

  BytesWritable bkey = new BytesWritable();
  BytesWritable bval = new BytesWritable();

  TaskAttemptContext context =
      MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
  OutputFormat<BytesWritable, BytesWritable> outputFormat =
      new SequenceFileAsBinaryOutputFormat();
  OutputCommitter committer = outputFormat.getOutputCommitter(context);
  committer.setupJob(job);
  RecordWriter<BytesWritable, BytesWritable> writer =
      outputFormat.getRecordWriter(context);

  IntWritable iwritable = new IntWritable();
  DoubleWritable dwritable = new DoubleWritable();
  DataOutputBuffer outbuf = new DataOutputBuffer();
  LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
  try {
    for (int i = 0; i < RECORDS; ++i) {
      iwritable = new IntWritable(r.nextInt());
      iwritable.write(outbuf);
      bkey.set(outbuf.getData(), 0, outbuf.getLength());
      outbuf.reset();
      dwritable = new DoubleWritable(r.nextDouble());
      dwritable.write(outbuf);
      bval.set(outbuf.getData(), 0, outbuf.getLength());
      outbuf.reset();
      writer.write(bkey, bval);
    }
  } finally {
    writer.close(context);
  }
  committer.commitTask(context);
  committer.commitJob(job);

  InputFormat<IntWritable, DoubleWritable> iformat =
      new SequenceFileInputFormat<IntWritable, DoubleWritable>();
  int count = 0;
  r.setSeed(seed);
  SequenceFileInputFormat.setInputPaths(job, outdir);
  LOG.info("Reading data by SequenceFileInputFormat");
  for (InputSplit split : iformat.getSplits(job)) {
    RecordReader<IntWritable, DoubleWritable> reader =
        iformat.createRecordReader(split, context);
    MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable> mcontext =
        new MapContextImpl<IntWritable, DoubleWritable, BytesWritable, BytesWritable>(
            job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
            MapReduceTestUtil.createDummyReporter(), split);
    reader.initialize(split, mcontext);
    try {
      int sourceInt;
      double sourceDouble;
      while (reader.nextKeyValue()) {
        sourceInt = r.nextInt();
        sourceDouble = r.nextDouble();
        iwritable = reader.getCurrentKey();
        dwritable = reader.getCurrentValue();
        assertEquals("Keys don't match: " + "*" + iwritable.get() + ":"
            + sourceInt + "*", sourceInt, iwritable.get());
        assertTrue("Vals don't match: " + "*" + dwritable.get() + ":"
            + sourceDouble + "*",
            Double.compare(dwritable.get(), sourceDouble) == 0);
        ++count;
      }
    } finally {
      reader.close();
    }
  }
  assertEquals("Some records not found", RECORDS, count);
}
Example 17
Source Project: hadoop File: TestMRSequenceFileAsBinaryOutputFormat.java License: Apache License 2.0
public void testBinary() throws IOException, InterruptedException {
  Configuration conf = new Configuration();
  Job job = Job.getInstance(conf);

  Path outdir = new Path(System.getProperty("test.build.data", "/tmp"), "outseq");
  Random r = new Random();
  long seed = r.nextLong();
  r.setSeed(seed);

  FileOutputFormat.setOutputPath(job, outdir);

  SequenceFileAsBinaryOutputFormat.setSequenceFileOutputKeyClass(job,
      IntWritable.class);
  SequenceFileAsBinaryOutputFormat.setSequenceFileOutputValueClass(job,
      DoubleWritable.class);

  SequenceFileAsBinaryOutputFormat.setCompressOutput(job, true);
  SequenceFileAsBinaryOutputFormat.setOutputCompressionType(job,
      CompressionType.BLOCK);

  BytesWritable bkey = new BytesWritable();
  BytesWritable bval = new BytesWritable();

  TaskAttemptContext context =
      MapReduceTestUtil.createDummyMapTaskAttemptContext(job.getConfiguration());
  OutputFormat<BytesWritable, BytesWritable> outputFormat =
      new SequenceFileAsBinaryOutputFormat();
  OutputCommitter committer = outputFormat.getOutputCommitter(context);
  committer.setupJob(job);
  RecordWriter<BytesWritable, BytesWritable> writer =
      outputFormat.getRecordWriter(context);

  IntWritable iwritable = new IntWritable();
  DoubleWritable dwritable = new DoubleWritable();
  DataOutputBuffer outbuf = new DataOutputBuffer();
  LOG.info("Creating data by SequenceFileAsBinaryOutputFormat");
  try {
    for (int i = 0; i < RECORDS; ++i) {
      iwritable = new IntWritable(r.nextInt());
      iwritable.write(outbuf);
      bkey.set(outbuf.getData(), 0, outbuf.getLength());
      outbuf.reset();
      dwritable = new DoubleWritable(r.nextDouble());
      dwritable.write(outbuf);
      bval.set(outbuf.getData(), 0, outbuf.getLength());
      outbuf.reset();
      writer.write(bkey, bval);
    }
  } finally {
    writer.close(context);
  }
  committer.commitTask(context);
  committer.commitJob(job);

  InputFormat<IntWritable, DoubleWritable> iformat =
      new SequenceFileInputFormat<IntWritable, DoubleWritable>();
  int count = 0;
  r.setSeed(seed);
  SequenceFileInputFormat.setInputPaths(job, outdir);
  LOG.info("Reading data by SequenceFileInputFormat");
  for (InputSplit split : iformat.getSplits(job)) {
    RecordReader<IntWritable, DoubleWritable> reader =
        iformat.createRecordReader(split, context);
    MapContext<IntWritable, DoubleWritable, BytesWritable, BytesWritable> mcontext =
        new MapContextImpl<IntWritable, DoubleWritable, BytesWritable, BytesWritable>(
            job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
            MapReduceTestUtil.createDummyReporter(), split);
    reader.initialize(split, mcontext);
    try {
      int sourceInt;
      double sourceDouble;
      while (reader.nextKeyValue()) {
        sourceInt = r.nextInt();
        sourceDouble = r.nextDouble();
        iwritable = reader.getCurrentKey();
        dwritable = reader.getCurrentValue();
        assertEquals("Keys don't match: " + "*" + iwritable.get() + ":"
            + sourceInt + "*", sourceInt, iwritable.get());
        assertTrue("Vals don't match: " + "*" + dwritable.get() + ":"
            + sourceDouble + "*",
            Double.compare(dwritable.get(), sourceDouble) == 0);
        ++count;
      }
    } finally {
      reader.close();
    }
  }
  assertEquals("Some records not found", RECORDS, count);
}
Example 18
Source Project: datawave File: CBMutationOutputFormatterTest.java License: Apache License 2.0
@Test
public void testRecordWriterWriteWithTableNameWithUpdates() throws IOException, InterruptedException {
  CBMutationOutputFormatterTest.logger.info("testRecordWriterWriteWithTableNameWithUpdates called...");
  try {
    CBMutationOutputFormatter uut = new CBMutationOutputFormatter();
    Assert.assertNotNull("CBMutationOutputFormatter constructor failed to generate an instance.", uut);

    Configuration conf = new Configuration();
    String simulationKey = String.format("%s.%s.%s", AccumuloOutputFormat.class.getSimpleName(),
        Features.SIMULATION_MODE.getDeclaringClass().getSimpleName(),
        StringUtils.camelize(Features.SIMULATION_MODE.name().toLowerCase()));
    conf.set(simulationKey, Boolean.TRUE.toString());
    conf.setInt("AccumuloOutputFormat.GeneralOpts.LogLevel", Level.ALL.toInt());
    conf.set(ShardedDataTypeHandler.SHARD_TNAME, "test-table");

    TaskAttemptContext attempt = new TaskAttemptContextImpl(conf, new TaskAttemptID());
    RecordWriter<Text, Mutation> rw = uut.getRecordWriter(attempt);
    Assert.assertNotNull("CBMutationOutputFormatter#getRecordWriter failed to create an instance of RecordWriter", rw);

    Text key = new Text("test-table");
    Mutation value = new Mutation("hello, world".getBytes());
    value.put("colf".getBytes(), "colq".getBytes(), "hello, world!!".getBytes());

    rw.write(key, value);

    List<String> entries = uutAppender.retrieveLogsEntries();
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to create simulation warning message.",
        processOutputContains(entries, "Simulating output only. No writes to tables will occur"));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to create Event Table name message.",
        processOutputContains(entries, "Event Table Name property for "));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to create simulation warning message.",
        processOutputContains(entries, "Table test-table row key: "));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to table column update message.",
        processOutputContains(entries, "Table test-table column: colf:colq"));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to table column security message.",
        processOutputContains(entries, "Table test-table security: "));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to table value message.",
        processOutputContains(entries, "Table test-table value: "));
  } finally {
    CBMutationOutputFormatterTest.logger.info("testRecordWriterWriteWithTableNameWithUpdates completed.");
  }
}
Example 19
Source Project: datawave File: CBMutationOutputFormatterTest.java License: Apache License 2.0
@Test
public void testRecordWriterWriteWithUpdatesAndTypes() throws IOException, InterruptedException {
  CBMutationOutputFormatterTest.logger.info("testRecordWriterWriteWithUpdatesAndTypes called...");
  try {
    CBMutationOutputFormatter uut = new CBMutationOutputFormatter();
    Assert.assertNotNull("CBMutationOutputFormatter constructor failed to generate an instance.", uut);

    URL url = CBMutationOutputFormatterTest.class.getResource("/datawave/ingest/mapreduce/job/IngestJob-test-type.xml");
    Configuration conf = new Configuration();
    conf.addResource(url);
    TypeRegistry.getInstance(conf);

    String simulationKey = String.format("%s.%s.%s", AccumuloOutputFormat.class.getSimpleName(),
        Features.SIMULATION_MODE.getDeclaringClass().getSimpleName(),
        StringUtils.camelize(Features.SIMULATION_MODE.name().toLowerCase()));
    conf.set(simulationKey, Boolean.TRUE.toString());
    conf.setInt("AccumuloOutputFormat.GeneralOpts.LogLevel", Level.ALL.toInt());
    conf.set(ShardedDataTypeHandler.SHARD_TNAME, "test-table");

    TaskAttemptContext attempt = new TaskAttemptContextImpl(conf, new TaskAttemptID());
    RecordWriter<Text, Mutation> rw = uut.getRecordWriter(attempt);
    Assert.assertNotNull("CBMutationOutputFormatter#getRecordWriter failed to create an instance of RecordWriter", rw);

    Text key = new Text("test-table");
    Mutation value = new Mutation("hello, world".getBytes());
    value.put("colf".getBytes(), "colq".getBytes(), "hello, world!!".getBytes());

    rw.write(key, value);

    List<String> entries = uutAppender.retrieveLogsEntries();
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to create simulation warning message.",
        processOutputContains(entries, "Simulating output only. No writes to tables will occur"));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to create Event Table name message.",
        processOutputContains(entries, "Event Table Name property for "));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to create simulation warning message.",
        processOutputContains(entries, "Table test-table row key: "));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to table column update message.",
        processOutputContains(entries, "Table test-table column: colf:colq"));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to table column security message.",
        processOutputContains(entries, "Table test-table security: "));
    Assert.assertTrue("CBMutationOutputFormatter$getRecordWriter#write failed to table value message.",
        processOutputContains(entries, "Table test-table value: "));
  } finally {
    CBMutationOutputFormatterTest.logger.info("testRecordWriterWriteWithUpdatesAndTypes completed.");
  }
}