Java Code Examples for org.apache.hadoop.io.IntWritable#get()

The following examples show how to use org.apache.hadoop.io.IntWritable#get(). Each example notes its original project, source file, and license.
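As a primer before the project examples, here is a minimal, self-contained sketch of what IntWritable#get() does: it returns the primitive int wrapped by the IntWritable, typically after the value was set via the constructor, set(int), or readFields(DataInput). (The IntWritableGetDemo class name is illustrative only, not drawn from any of the projects below.)

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
    public static void main(String[] args) {
        // get() unwraps the primitive int held by the Writable.
        IntWritable iw = new IntWritable(42);
        int v = iw.get();             // 42

        // Writables are mutable; set() overwrites the wrapped value in place,
        // which is why Hadoop reuses one instance across records.
        iw.set(v + 1);
        System.out.println(iw.get()); // prints 43
    }
}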
Example 1
Source File: TempSort.java    From MapReduce-Demo with MIT License
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
    // Temperatures arrive in descending order, so the first value is the maximum:
    // IntWritable temp = values.iterator().next();
    // System.out.println("Temperature: " + temp);
    // context.write(key, temp);

    int maxTemp = Integer.MIN_VALUE;
    for (IntWritable value : values) {
        System.out.println("Year: " + key + ", Temperature: " + value);
        if (value.get() > maxTemp) {
            maxTemp = value.get();
        }
    }
    System.out.println("Date: " + key + ", MaxTemp: " + maxTemp);
    context.write(key, new IntWritable(maxTemp));
}
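Note that the loop copies value.get() into a primitive before moving on: Hadoop's reduce-side iterable reuses a single IntWritable instance across values, so keeping a reference to the Writable itself (rather than the int it wraps) would see its contents change on the next iteration.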
 
Example 2
Source File: TestDatamerge.java    From RDFS with Apache License 2.0
public void map(IntWritable key, TupleWritable val,
    OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
    throws IOException {
  int k = key.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  assertTrue(kvstr, 0 == k % (srcs * srcs));
  for (int i = 0; i < val.size(); ++i) {
    final int vali = ((IntWritable)val.get(i)).get();
    assertTrue(kvstr, (vali - i) * srcs == 10 * k);
  }
  out.collect(key, one);
}
 
Example 3
Source File: CellCounter.java    From hbase with Apache License 2.0
public void reduce(Key key, Iterable<IntWritable> values,
  Context context)
throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
 
Example 4
Source File: LinkCountCooccurrences.java    From marklogic-contentpump with Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, 
        Context context
        ) throws IOException, InterruptedException {        
    int sum = 0;
    for (IntWritable val : values) {
        sum += val.get();
    }
    result.set(sum);
    context.write(key, result);
}
 
Example 5
Source File: MrjobRemoteCommitDemo.java    From bigdata-tutorial with Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
	int sum = 0;
	for (IntWritable val : values) {
		sum += val.get();
	}
	result.set(sum);
	context.write(key, result);
}
 
Example 6
Source File: TestMapRed.java    From hadoop-gpu with Apache License 2.0
public void reduce(IntWritable key, Iterator<IntWritable> it,
                   OutputCollector<IntWritable, IntWritable> out,
                   Reporter reporter) throws IOException {
  int keyint = key.get();
  int total = 0;
  while (it.hasNext()) {
    total += it.next().get();
  }
  out.collect(new IntWritable(keyint), new IntWritable(total));
}
 
Example 7
Source File: DataProfNoCriteriaReducer.java    From jumbune with GNU Lesser General Public License v3.0
public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
	
	int sum = 0;
	for (IntWritable val : values) {
		sum += val.get();
	}
	DataProfNoCritBean dataProfNoCritBean = new DataProfNoCritBean();
	dataProfNoCritBean.setKey(key.toString());
	dataProfNoCritBean.setValue(sum);
	int pos = getPos(sorted, dataProfNoCritBean);
	insert(pos, sorted, dataProfNoCritBean);
}
 
Example 8
Source File: CustomWritableWithCircle.java    From pxf with Apache License 2.0
@Override
public void readFields(DataInput paramDataInput) throws IOException {
    IntWritable localIntWritable = new IntWritable();
    localIntWritable.readFields(paramDataInput);
    this.int1 = localIntWritable.get();

    Text localText = new Text();
    localText.readFields(paramDataInput);
    this.circle = localText.toString();
}
 
Example 9
Source File: GroupByOrderByMRRTest.java    From incubator-tez with Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values,
    Context context
    ) throws IOException, InterruptedException {

  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(result, key);
}
 
Example 10
Source File: CustomWritableWithChar.java    From pxf with Apache License 2.0
@Override
public void readFields(DataInput paramDataInput) throws IOException {
    IntWritable localIntWritable = new IntWritable();
    localIntWritable.readFields(paramDataInput);
    this.int1 = localIntWritable.get();

    localIntWritable.readFields(paramDataInput);
    this.char1 = (char) localIntWritable.get();
}
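In both pxf readers above, readFields consumes fields in exactly the order the matching write(DataOutput) emitted them, and get() copies each freshly deserialized int into an instance field before the reusable IntWritable is overwritten by the next read.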
 
Example 11
Source File: WordCountOutput.java    From aerospike-hadoop with Apache License 2.0
@Override
public void writeAerospike(Text key,
                           IntWritable value,
                           AerospikeClient client,
                           WritePolicy writePolicy,
                           String namespace,
                           String setName) throws IOException {
    Key kk = new Key(namespace, setName, key.toString());
    Bin bin1 = new Bin("word", key.toString());
    Bin bin2 = new Bin("count", value.get());
    client.put(writePolicy, kk, bin1, bin2);
}
 
Example 12
Source File: TestOrderedWordCount.java    From tez with Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values,
                   Context context
                   ) throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(result, key);
}
 
Example 13
Source File: ColumnCardinalityReducer.java    From Kylin with Apache License 2.0
@Override
public void reduce(IntWritable key, Iterable<BytesWritable> values, Context context) throws IOException, InterruptedException {
    int skey = key.get();
    for (BytesWritable v : values) {
        ByteBuffer buffer = ByteBuffer.wrap(v.getBytes());
        HyperLogLogPlusCounter hll = new HyperLogLogPlusCounter();
        hll.readRegisters(buffer);
        getHllc(skey).merge(hll);
        hll.clear();
    }
}
 
Example 14
Source File: LinkCountHDFS.java    From marklogic-contentpump with Apache License 2.0
public void reduce(Text key, Iterable<IntWritable> values, 
    Context context) throws IOException, InterruptedException {
    int sum = 0;
    for (IntWritable val : values) {
        sum += val.get();
    }
    result.set(sum);
    context.write(key, result);
}
 
Example 15
Source File: ShuffleScheduler.java    From tez with Apache License 2.0
/**
 * To determine if failures happened across nodes or not. This will help in
 * determining whether this task needs to be restarted or source needs to
 * be restarted.
 *
 * @param logContext context info for logging
 * @return boolean true indicates this task needs to be restarted
 */
private boolean hasFailedAcrossNodes(String logContext) {
  int numUniqueHosts = uniqueHosts.size();
  Preconditions.checkArgument(numUniqueHosts > 0, "No values in unique hosts");
  int threshold = Math.max(3,
      (int) Math.ceil(numUniqueHosts * hostFailureFraction));
  int total = 0;
  boolean failedAcrossNodes = false;
  for(HostPort host : uniqueHosts) {
    IntWritable failures = hostFailures.get(host);
    if (failures != null && failures.get() > minFailurePerHost) {
      total++;
      failedAcrossNodes = (total > (threshold * minFailurePerHost));
      if (failedAcrossNodes) {
        break;
      }
    }
  }

  LOG.info(logContext + ", numUniqueHosts=" + numUniqueHosts
      + ", hostFailureThreshold=" + threshold
      + ", hostFailuresCount=" + hostFailures.size()
      + ", hosts crossing threshold=" + total
      + ", reducerFetchIssues=" + failedAcrossNodes
    );

  return failedAcrossNodes;
}
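The bar here is dynamic: threshold = max(3, ceil(numUniqueHosts * hostFailureFraction)); a host counts only once its recorded failures (tracked as IntWritable counters in the hostFailures map) exceed minFailurePerHost, and a cross-node failure is declared once more than threshold * minFailurePerHost hosts cross that bar.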
 
Example 16
Source File: TestComparators.java    From big-c with Apache License 2.0
public boolean equals (IntWritable v1, IntWritable v2) {
  int val1 = v1.get();
  int val2 = v2.get();
  
  return (val1/100) == (val2/100);
}
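Here get() backs a deliberately coarse equality test: two IntWritables compare equal whenever integer division by 100 puts them in the same bucket (0-99, 100-199, ...), so 199 equals 100 but not 200.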
 
Example 17
Source File: ST_GeometryN.java    From spatial-framework-for-hadoop with Apache License 2.0
public BytesWritable evaluate(BytesWritable geomref, IntWritable index) {
	if (geomref == null || geomref.getLength() == 0 || index == null) {
		LogUtils.Log_ArgumentsNull(LOG);
		return null;
	}

	OGCGeometry ogcGeometry = GeometryUtils.geometryFromEsriShape(geomref);
	if (ogcGeometry == null){
		LogUtils.Log_ArgumentsNull(LOG);
		return null;
	}

	int idx = index.get() - 1;  // 1-based UI, 0-based engine
	try {
		GeometryUtils.OGCType ogcType = GeometryUtils.getType(geomref);
		OGCGeometry ogcGeom = null;
		switch(ogcType) {
		case ST_POINT:
			LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOINT, ogcType);
			return null;
		case ST_LINESTRING:
			LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTILINESTRING, ogcType);
			return null;
		case ST_POLYGON:
			LogUtils.Log_InvalidType(LOG, GeometryUtils.OGCType.ST_MULTIPOLYGON, ogcType);
			return null;
		case ST_MULTIPOINT:
			ogcGeom = ((OGCMultiPoint)ogcGeometry).geometryN(idx);
			break;
		case ST_MULTILINESTRING:
			ogcGeom = ((OGCMultiLineString)ogcGeometry).geometryN(idx);
			break;
		case ST_MULTIPOLYGON:
			ogcGeom = ((OGCMultiPolygon)ogcGeometry).geometryN(idx);
			break;
		}
		return GeometryUtils.geometryToEsriShapeBytesWritable(ogcGeom);
	} catch (Exception e) {
		LogUtils.Log_InternalError(LOG, "ST_GeometryN: " + e);
		return null;
	}
}
 
Example 18
Source File: TestJoinProperties.java    From hadoop with Apache License 2.0
private void validateKeyValue_INNER_IDENTITY(IntWritable k, IntWritable v) {
  final String kvstr = "Unexpected tuple: " + stringify(k, v);
  int key = k.get();
  assertTrue(kvstr, (key % 2 == 0 && key / 2 <= ITEMS));
  assertTrue(kvstr, v.get() == 0);
}
 
Example 19
Source File: TestCombineSequenceFileInputFormat.java    From hadoop with Apache License 2.0
@Test(timeout=10000)
public void testFormat() throws IOException, InterruptedException {
  Job job = Job.getInstance(conf);

  Random random = new Random();
  long seed = random.nextLong();
  random.setSeed(seed);

  localFs.delete(workDir, true);
  FileInputFormat.setInputPaths(job, workDir);

  final int length = 10000;
  final int numFiles = 10;

  // create files with a variety of lengths
  createFiles(length, numFiles, random, job);

  TaskAttemptContext context = MapReduceTestUtil.
    createDummyMapTaskAttemptContext(job.getConfiguration());
  // create a combine split for the files
  InputFormat<IntWritable,BytesWritable> format =
    new CombineSequenceFileInputFormat<IntWritable,BytesWritable>();
  for (int i = 0; i < 3; i++) {
    int numSplits =
      random.nextInt(length/(SequenceFile.SYNC_INTERVAL/20)) + 1;
    LOG.info("splitting: requesting = " + numSplits);
    List<InputSplit> splits = format.getSplits(job);
    LOG.info("splitting: got =        " + splits.size());

    // we should have a single split as the length is comfortably smaller than
    // the block size
    assertEquals("We got more than one splits!", 1, splits.size());
    InputSplit split = splits.get(0);
    assertEquals("It should be CombineFileSplit",
      CombineFileSplit.class, split.getClass());

    // check the split
    BitSet bits = new BitSet(length);
    RecordReader<IntWritable,BytesWritable> reader =
      format.createRecordReader(split, context);
    MapContext<IntWritable,BytesWritable,IntWritable,BytesWritable> mcontext =
      new MapContextImpl<IntWritable,BytesWritable,IntWritable,BytesWritable>(job.getConfiguration(),
      context.getTaskAttemptID(), reader, null, null,
      MapReduceTestUtil.createDummyReporter(), split);
    reader.initialize(split, mcontext);
    assertEquals("reader class is CombineFileRecordReader.",
      CombineFileRecordReader.class, reader.getClass());

    try {
      while (reader.nextKeyValue()) {
        IntWritable key = reader.getCurrentKey();
        BytesWritable value = reader.getCurrentValue();
        assertNotNull("Value should not be null.", value);
        final int k = key.get();
        LOG.debug("read " + k);
        assertFalse("Key in multiple partitions.", bits.get(k));
        bits.set(k);
      }
    } finally {
      reader.close();
    }
    assertEquals("Some keys in no partition.", length, bits.cardinality());
  }
}
 
Example 20
Source File: FSImageSerialization.java    From big-c with Apache License 2.0
/** read the int value */
static int readInt(DataInput in) throws IOException {
  IntWritable uInt = TL_DATA.get().U_INT;
  uInt.readFields(in);
  return uInt.get();
}