Java Code Examples for org.apache.hadoop.io.IntWritable#get()
The following examples show how to use org.apache.hadoop.io.IntWritable#get(). They are extracted from open source projects; the source project, file, and license are noted above each example.
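All of the snippets below follow the same basic contract: an IntWritable wraps a single Java int for Hadoop's serialization machinery, and get() unwraps it back to a primitive. As a quick orientation before the project examples, here is a minimal, self-contained sketch (the class name IntWritableGetDemo is ours for illustration, not from any of the projects below):

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
  public static void main(String[] args) {
    IntWritable count = new IntWritable(41); // box a primitive int for Hadoop I/O
    int plain = count.get();                 // unbox it back to a primitive int
    count.set(plain + 1);                    // IntWritable is mutable and reusable
    System.out.println(count.get());         // prints 42
  }
}

Note that the instance is mutable: Hadoop reducers typically receive the same reused IntWritable object on every iteration, which is why the examples below copy the value out with get() rather than holding a reference to the wrapper.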
Example 1
Source Project: MapReduce-Demo File: TempSort.java License: MIT License
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  // Temperatures sorted in descending order: take the first one
  // IntWritable temp = values.iterator().next();
  // System.out.println("Temperature: " + temp);
  // context.write(key, temp);
  int maxTemp = Integer.MIN_VALUE;
  for (IntWritable value : values) {
    System.out.println("Year: " + key + ", Temperature: " + value);
    if (value.get() > maxTemp) {
      maxTemp = value.get();
    }
  }
  System.out.println("Date: " + key + ", MaxTemp: " + maxTemp);
  context.write(key, new IntWritable(maxTemp));
}
Example 2
Source Project: tez File: ShuffleScheduler.java License: Apache License 2.0
/**
 * To determine if failures happened across nodes or not. This will help in
 * determining whether this task needs to be restarted or source needs to
 * be restarted.
 *
 * @param logContext context info for logging
 * @return boolean true indicates this task needs to be restarted
 */
private boolean hasFailedAcrossNodes(String logContext) {
  int numUniqueHosts = uniqueHosts.size();
  Preconditions.checkArgument(numUniqueHosts > 0, "No values in unique hosts");
  int threshold = Math.max(3,
      (int) Math.ceil(numUniqueHosts * hostFailureFraction));
  int total = 0;
  boolean failedAcrossNodes = false;
  for (HostPort host : uniqueHosts) {
    IntWritable failures = hostFailures.get(host);
    if (failures != null && failures.get() > minFailurePerHost) {
      total++;
      failedAcrossNodes = (total > (threshold * minFailurePerHost));
      if (failedAcrossNodes) {
        break;
      }
    }
  }
  LOG.info(logContext
      + ", numUniqueHosts=" + numUniqueHosts
      + ", hostFailureThreshold=" + threshold
      + ", hostFailuresCount=" + hostFailures.size()
      + ", hosts crossing threshold=" + total
      + ", reducerFetchIssues=" + failedAcrossNodes);
  return failedAcrossNodes;
}
Example 3
Source Project: RDFS File: TestDatamerge.java License: Apache License 2.0
public void map(IntWritable key, TupleWritable val,
    OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
    throws IOException {
  int k = key.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  assertTrue(kvstr, 0 == k % (srcs * srcs));
  for (int i = 0; i < val.size(); ++i) {
    final int vali = ((IntWritable) val.get(i)).get();
    assertTrue(kvstr, (vali - i) * srcs == 10 * k);
  }
  out.collect(key, one);
}
Example 4
Source Project: marklogic-contentpump File: LinkCountHDFS.java License: Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
Example 5
Source Project: Kylin File: ColumnCardinalityReducer.java License: Apache License 2.0
@Override
public void reduce(IntWritable key, Iterable<BytesWritable> values, Context context)
    throws IOException, InterruptedException {
  int skey = key.get();
  for (BytesWritable v : values) {
    ByteBuffer buffer = ByteBuffer.wrap(v.getBytes());
    HyperLogLogPlusCounter hll = new HyperLogLogPlusCounter();
    hll.readRegisters(buffer);
    getHllc(skey).merge(hll);
    hll.clear();
  }
}
Example 6
Source Project: tez File: TestOrderedWordCount.java License: Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(result, key);
}
Example 7
Source Project: aerospike-hadoop File: WordCountOutput.java License: Apache License 2.0
@Override
public void writeAerospike(Text key, IntWritable value, AerospikeClient client,
    WritePolicy writePolicy, String namespace, String setName) throws IOException {
  Key kk = new Key(namespace, setName, key.toString());
  Bin bin1 = new Bin("word", key.toString());
  Bin bin2 = new Bin("count", value.get());
  client.put(writePolicy, kk, bin1, bin2);
}
Example 8
Source Project: pxf File: CustomWritableWithChar.java License: Apache License 2.0
@Override
public void readFields(DataInput paramDataInput) throws IOException {
  IntWritable localIntWritable = new IntWritable();
  localIntWritable.readFields(paramDataInput);
  this.int1 = localIntWritable.get();
  localIntWritable.readFields(paramDataInput);
  this.char1 = ((char) localIntWritable.get());
}
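Example 8 (and Example 10 below) shows only the readFields half of the Writable contract. For context, a matching write for CustomWritableWithChar would frame the same fields in the same order; the body below is a hedged sketch under that assumption, not the project's actual code:

@Override
public void write(DataOutput paramDataOutput) throws IOException {
  // Mirror readFields: frame each field as an IntWritable, in the same order
  IntWritable localIntWritable = new IntWritable();
  localIntWritable.set(this.int1);
  localIntWritable.write(paramDataOutput);
  localIntWritable.set(this.char1); // char widens to int
  localIntWritable.write(paramDataOutput);
}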
Example 9
Source Project: incubator-tez File: GroupByOrderByMRRTest.java License: Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(result, key);
}
Example 10
Source Project: pxf File: CustomWritableWithCircle.java License: Apache License 2.0
@Override
public void readFields(DataInput paramDataInput) throws IOException {
  IntWritable localIntWritable = new IntWritable();
  localIntWritable.readFields(paramDataInput);
  this.int1 = localIntWritable.get();
  Text localText = new Text();
  localText.readFields(paramDataInput);
  this.circle = localText.toString();
}
Example 11
Source Project: jumbune File: DataProfNoCriteriaReducer.java License: GNU Lesser General Public License v3.0
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  DataProfNoCritBean dataProfNoCritBean = new DataProfNoCritBean();
  dataProfNoCritBean.setKey(key.toString());
  dataProfNoCritBean.setValue(sum);
  int pos = getPos(sorted, dataProfNoCritBean);
  insert(pos, sorted, dataProfNoCritBean);
}
Example 12
Source Project: hadoop-gpu File: TestMapRed.java License: Apache License 2.0
public void reduce(IntWritable key, Iterator<IntWritable> it,
    OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
    throws IOException {
  int keyint = key.get();
  int total = 0;
  while (it.hasNext()) {
    total += it.next().get();
  }
  out.collect(new IntWritable(keyint), new IntWritable(total));
}
Example 13
Source Project: bigdata-tutorial File: MrjobRemoteCommitDemo.java License: Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
Example 14
Source Project: marklogic-contentpump File: LinkCountCooccurrences.java License: Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
Example 15
Source Project: hbase File: CellCounter.java License: Apache License 2.0
public void reduce(Key key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
Example 16
Source Project: big-c File: FSImageSerialization.java License: Apache License 2.0
/** read the int value */
static int readInt(DataInput in) throws IOException {
  IntWritable uInt = TL_DATA.get().U_INT;
  uInt.readFields(in);
  return uInt.get();
}
Example 17
Source Project: big-c File: TestComparators.java License: Apache License 2.0
public boolean equals(IntWritable v1, IntWritable v2) {
  int val1 = v1.get();
  int val2 = v2.get();
  return (val1 / 100) == (val2 / 100);
}
Example 18
Source Project: spatial-framework-for-hadoop File: ST_GeometryN.java License: Apache License 2.0
public BytesWritable evaluate(BytesWritable geomref, IntWritable index) {
  if (geomref == null || geomref.getLength() == 0 || index == null) {
    LogUtils.Log_ArgumentsNull(LOG);
    return null;
  }

  OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
  if (ogcGeometry == null) {
    LogUtils.Log_ArgumentsNull(LOG);
    return null;
  }

  int idx = index.get() - 1;  // 1-based UI, 0-based engine

  try {
    GeometryUtils.OGCType ogcType = GeometryUtils.getType(geomref);
    OGCGeometry ogcGeom = null;
    switch (ogcType) {
      case ST_POINT:
        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, ogcType);
        return null;
      case ST_LINESTRING:
        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, ogcType);
        return null;
      case ST_POLYGON:
        LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, ogcType);
        return null;
      case ST_MULTIPOINT:
        ogcGeom = ((OGCMultiPoint) ogcGeometry).geometryN(idx);
        break;
      case ST_MULTILINESTRING:
        ogcGeom = ((OGCMultiLineString) ogcGeometry).geometryN(idx);
        break;
      case ST_MULTIPOLYGON:
        ogcGeom = ((OGCMultiPolygon) ogcGeometry).geometryN(idx);
        break;
    }
    return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom);
  } catch (Exception e) {
    LogUtils.Log_InternalError(LOG, "ST_GeometryN: " + e);
    return null;
  }
}
Example 19
Source Project: hadoop File: TestJoinProperties.java License: Apache License 2.0
private void validateKeyValue_INNER_IDENTITY(IntWritable k, IntWritable v) {
  final String kvstr = "Unexpected tuple: " + stringify(k, v);
  int key = k.get();
  assertTrue(kvstr, (key % 2 == 0 && key / 2 <= ITEMS));
  assertTrue(kvstr, v.get() == 0);
}
Example 20
Source Project: hadoop File: TestCombineSequenceFileInputFormat.java License: Apache License 2.0
@Test(timeout=10000)
public void testFormat() throws IOException, InterruptedException {
  Job job = Job.getInstance(conf);

  Random random = new Random();
  long seed = random.nextLong();
  random.setSeed(seed);

  localFs.delete(workDir, true);
  FileInputFormat.setInputPaths(job, workDir);

  final int length = 10000;
  final int numFiles = 10;

  // create files with a variety of lengths
  createFiles(length, numFiles, random, job);

  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(job.getConfiguration());
  // create a combine split for the files
  InputFormat<IntWritable,BytesWritable> format =
    new CombineSequenceFileInputFormat<IntWritable,BytesWritable>();
  for (int i = 0; i < 3; i++) {
    int numSplits =
      random.nextInt(length/(SequenceFile.SYNC_INTERVAL/20)) + 1;
    LOG.info("splitting: requesting = " + numSplits);
    List<InputSplit> splits = format.getSplits(job);
    LOG.info("splitting: got = " + splits.size());

    // we should have a single split as the length is comfortably smaller than
    // the block size
    assertEquals("We got more than one splits!", 1, splits.size());
    InputSplit split = splits.get(0);
    assertEquals("It should be CombineFileSplit",
      CombineFileSplit.class, split.getClass());

    // check the split
    BitSet bits = new BitSet(length);
    RecordReader<IntWritable,BytesWritable> reader =
      format.createRecordReader(split, context);
    MapContext<IntWritable,BytesWritable,IntWritable,BytesWritable> mcontext =
      new MapContextImpl<IntWritable,BytesWritable,IntWritable,BytesWritable>(
        job.getConfiguration(), context.getTaskAttemptID(), reader, null, null,
        MapReduceTestUtil.createDummyReporter(), split);
    reader.initialize(split, mcontext);
    assertEquals("reader class is CombineFileRecordReader.",
      CombineFileRecordReader.class, reader.getClass());

    try {
      while (reader.nextKeyValue()) {
        IntWritable key = reader.getCurrentKey();
        BytesWritable value = reader.getCurrentValue();
        assertNotNull("Value should not be null.", value);
        final int k = key.get();
        LOG.debug("read " + k);
        assertFalse("Key in multiple partitions.", bits.get(k));
        bits.set(k);
      }
    } finally {
      reader.close();
    }
    assertEquals("Some keys in no partition.", length, bits.cardinality());
  }
}