org.apache.hadoop.io.DoubleWritable Java Examples

The following examples show how to use org.apache.hadoop.io.DoubleWritable. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
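Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below; class name and values are illustrative) showing the basic DoubleWritable API: construction, get/set, comparison, and a round-trip through Hadoop's in-memory serialization buffers.

import java.io.IOException;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableBasics {
  public static void main(String[] args) throws IOException {
    // A DoubleWritable is a mutable, serializable box around a primitive double.
    DoubleWritable dw = new DoubleWritable(3.14);
    dw.set(2.718);                 // reuse the same instance instead of allocating a new one
    System.out.println(dw.get());  // 2.718

    // DoubleWritable is a WritableComparable, so it can also serve as a map/reduce key.
    System.out.println(dw.compareTo(new DoubleWritable(10.0)));  // negative

    // Round-trip through Hadoop's buffers, as the framework does when shuffling data.
    DataOutputBuffer out = new DataOutputBuffer();
    dw.write(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), out.getLength());
    DoubleWritable copy = new DoubleWritable();
    copy.readFields(in);
    System.out.println(copy.get());  // 2.718
  }
}
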
Example #1
Source File: GenericAggregateQuery.java    From Eagle with Apache License 2.0
protected Map<List<String>, List<Double>> keyValuesToMap(List<GroupbyKeyValue> entities) throws Exception {
	Map<List<String>, List<Double>> aggResultMap = new HashMap<List<String>, List<Double>>();
	try {
		for(GroupbyKeyValue keyValue:entities){
			List<String> key = new ArrayList<String>();
			for(BytesWritable bw:keyValue.getKey().getValue()){
				key.add(new String(bw.copyBytes(), QueryConstants.CHARSET));
			}
			List<Double> value = new ArrayList<Double>();
			for(DoubleWritable wa:keyValue.getValue().getValue()){
				value.add(wa.get());
			}
			aggResultMap.put(key, value);
		}
	} catch (UnsupportedEncodingException e) {
		LOG.error(QueryConstants.CHARSET +" not support: "+e.getMessage(),e);
	}
	return aggResultMap;
}
 
Example #2
Source File: HadoopCloverConvert.java    From CloverETL-Engine with GNU Lesser General Public License v2.1
@SuppressWarnings("rawtypes")
public static Class cloverType2Hadoop(DataFieldMetadata field) throws IOException{
	switch (field.getDataType()){
	case BOOLEAN:
		return BooleanWritable.class;
	case BYTE:
	case CBYTE:
		return BytesWritable.class;
	case DATE:
		return LongWritable.class;
	case INTEGER:
		return IntWritable.class;
	case LONG:
		return LongWritable.class;
	case NUMBER:
		return DoubleWritable.class;
	case STRING:
		return Text.class;
	default:
		throw new IOException(String.format("Unsupported CloverDX data type \"%s\" of field \"%s\" in conversion to Hadoop.",field.getDataType().getName(),field.getName()));
		
	}
}
 
Example #3
Source File: DefaultStratosphereTypeConverter.java    From stratosphere with Apache License 2.0
@SuppressWarnings("unchecked")
private<T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
	if(hadoopType == LongWritable.class ) {
		return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.Text.class) {
		return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.IntWritable.class) {
		return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.FloatWritable.class) {
		return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
		return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
		return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.ByteWritable.class) {
		return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
	}

	throw new RuntimeException("Unable to convert Stratosphere type ("+stratosphereType.getClass().getCanonicalName()+") to Hadoop.");
}
 
Example #4
Source File: PageRankTest.java    From distributed-graph-analytics with Apache License 2.0
@Test
public void testHighPageRankForOneNode() throws Exception {
    GiraphConfiguration conf = getConf();
    TestGraph<Text, DoubleWritable, Text> input = getHighPageRankGraph(conf);
    InMemoryVertexOutputFormat.initializeOutputGraph(conf);
    InternalVertexRunner.run(conf, input);
    TestGraph<Text, DoubleWritable, Text> output = InMemoryVertexOutputFormat.getOutputGraph();
    assertEquals(8, output.getVertices().size());
    assertTrue(output.getVertex(new Text("8")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("2")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("3")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("4")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("5")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("6")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("7")).getValue().get() < output.getVertex(new Text("1")).getValue().get());

}
 
Example #5
Source File: MRStressTest.java    From incubator-gobblin with Apache License 2.0
@Override
public void run() {
  DescriptiveStatistics stats = this.limiter.getRateStatsSinceLastReport();
  long now = System.currentTimeMillis();
  this.runs++;

  if (stats != null) {
    long key;
    if (this.relativeKey) {
      key = 15 * this.runs;
    } else {
      DateTime nowTime = new DateTime(now).withMillisOfSecond(0);
      DateTime rounded = nowTime.withSecondOfMinute(15 * (nowTime.getSecondOfMinute() / 15));
      key = rounded.getMillis() / 1000;
    }


    try {
      this.context.write(new LongWritable(key), new DoubleWritable(stats.getSum()));
    } catch (IOException | InterruptedException ioe) {
      log.error("Error: ", ioe);
    }
  }

}
 
Example #6
Source File: TypeConverterFactory.java    From pentaho-hadoop-shims with Apache License 2.0
/**
 * Determine the Hadoop writable type to pass Kettle type back to Hadoop as.
 *
 * @param kettleType
 * @return Java type to convert {@code kettleType} to when sending data back to Hadoop.
 */
public static Class<? extends Writable> getWritableForKettleType( ValueMetaInterface kettleType ) {
  if ( kettleType == null ) {
    return NullWritable.class;
  }
  switch ( kettleType.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_DATE:
      return Text.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return LongWritable.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return DoubleWritable.class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return BooleanWritable.class;
    case ValueMetaInterface.TYPE_BINARY:
      return BytesWritable.class;
    default:
      return Text.class;
  }
}
 
Example #7
Source File: L12.java    From spork with Apache License 2.0
public void map(
        LongWritable k,
        Text val,
        OutputCollector<Text, DoubleWritable> oc,
        Reporter reporter) throws IOException {

    // NOTE: the delimiter character was garbled in extraction; the PigMix input data is
    // Ctrl-A delimited, so '\u0001' is assumed here.
    List<Text> fields = Library.splitLine(val, '\u0001');

    // Filter out null users or query terms.
    if (fields.get(0).getLength() == 0 ||
            fields.get(3).getLength() == 0) return;
    try {
        oc.collect(fields.get(0),
            new DoubleWritable(Double.valueOf(fields.get(6).toString())));
    } catch (NumberFormatException nfe) {
        oc.collect(fields.get(0), new DoubleWritable(0));
    }
}
 
Example #8
Source File: GenericCoprocessorAggregateQuery.java    From eagle with Apache License 2.0
protected Map<List<String>, List<Double>> keyValuesToMap(List<GroupbyKeyValue> entities) throws Exception {
    Map<List<String>, List<Double>> aggResultMap = new HashMap<List<String>, List<Double>>();
    try {
        for (GroupbyKeyValue keyValue : entities) {
            List<String> key = new ArrayList<String>();
            for (BytesWritable bw : keyValue.getKey().getValue()) {
                key.add(new String(bw.copyBytes(), QueryConstants.CHARSET));
            }
            List<Double> value = new ArrayList<Double>();
            for (DoubleWritable wa : keyValue.getValue().getValue()) {
                value.add(wa.get());
            }
            aggResultMap.put(key, value);
        }
    } catch (UnsupportedEncodingException e) {
        LOG.error(QueryConstants.CHARSET + " not support: " + e.getMessage(), e);
    }
    return aggResultMap;
}
 
Example #9
Source File: AvroMixedMapReduce.java    From hiped2 with Apache License 2.0
public void reduce(Text key,
                   Iterator<DoubleWritable> values,
                   OutputCollector<AvroWrapper<StockAvg>,
                       NullWritable> output,
                   Reporter reporter) throws IOException {

  Mean mean = new Mean();
  while (values.hasNext()) {
    mean.increment(values.next().get());
  }
  StockAvg avg = new StockAvg();
  avg.setSymbol(key.toString());
  avg.setAvg(mean.getResult());
  output.collect(new AvroWrapper<StockAvg>(avg),
      NullWritable.get());
}
 
Example #10
Source File: PageRankComputation.java    From distributed-graph-analytics with Apache License 2.0
@Override
public void compute(Vertex<Text, DoubleWritable, Text> vertex, Iterable<DoubleWritable> messages) throws IOException {

    float dampingFactor = this.getConf().getFloat(DAMPING_FACTOR, DAMPING_FACTOR_DEFAULT_VALUE);

    long step = getSuperstep();

    if (step == 0) {
        //set initial value
        logger.debug("Superstep is 0: Setting the default value.");
        vertex.setValue(new DoubleWritable(1.0 / getTotalNumVertices()));
    } else { // go until no one votes to continue

        double rank = 0;
        for (DoubleWritable partial : messages) {
            rank += partial.get();
        }
        rank = ((1 - dampingFactor) / getTotalNumVertices()) + (dampingFactor * rank);
        double vertexValue = vertex.getValue().get();
        double delta = Math.abs(rank - vertexValue) / vertexValue;
        aggregate(MAX_EPSILON, new DoubleWritable(delta));
        vertex.setValue(new DoubleWritable(rank));
        logger.debug("{} is calculated {} for a PageRank.", vertex.getId(), rank);
    }
    distributeRank(vertex);
}
 
Example #11
Source File: P1.java    From IntroToHadoopAndMR__Udacity_Course with Apache License 2.0
public final static void main(final String[] args) throws Exception {
	final Configuration conf = new Configuration();

	final Job job = new Job(conf, "P1");
	job.setJarByClass(P1.class);

	job.setOutputKeyClass(Text.class);
	job.setOutputValueClass(DoubleWritable.class);

	job.setMapperClass(P1Map.class);
	job.setCombinerClass(P1Reduce.class);
	job.setReducerClass(P1Reduce.class);

	job.setInputFormatClass(TextInputFormat.class);
	job.setOutputFormatClass(TextOutputFormat.class);

	FileInputFormat.addInputPath(job, new Path(args[0]));
	FileOutputFormat.setOutputPath(job, new Path(args[1]));

	job.waitForCompletion(true);
}
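
The P1Map and P1Reduce classes referenced by this driver are not shown here. As a rough, hypothetical sketch (not the actual course code; the field positions and class names are assumptions), a mapper/combiner/reducer pair wired this way typically emits Text keys with DoubleWritable values and sums them:

import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

// Hypothetical illustration only -- not the P1Map/P1Reduce from the course repository.
class SumMapper extends Mapper<LongWritable, Text, Text, DoubleWritable> {
  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    String[] fields = value.toString().split("\t");
    if (fields.length > 4) {
      // Emit (category, amount); the column indices here are assumed, not taken from the course data.
      context.write(new Text(fields[2]), new DoubleWritable(Double.parseDouble(fields[4])));
    }
  }
}

class SumReducer extends Reducer<Text, DoubleWritable, Text, DoubleWritable> {
  @Override
  protected void reduce(Text key, Iterable<DoubleWritable> values, Context context)
      throws IOException, InterruptedException {
    double sum = 0.0;
    for (DoubleWritable val : values) {
      sum += val.get();
    }
    // Because the same class is registered as the combiner, its output types match its input types.
    context.write(key, new DoubleWritable(sum));
  }
}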
 
Example #12
Source File: OrcTestTools.java    From incubator-gobblin with Apache License 2.0
/**
 * All Writable objects passed in here are guaranteed to be primitive writable objects.
 */
private boolean objCastHelper(Object javaObj, Writable obj) {
  if (obj instanceof IntWritable) {
    return ((IntWritable) obj).get() == (Integer) javaObj;
  } else if (obj instanceof Text) {
    return (obj).toString().equals(javaObj);
  } else if (obj instanceof LongWritable) {
    return ((LongWritable) obj).get() == (Long) javaObj;
  } else if (obj instanceof ShortWritable) {
    return ((ShortWritable) obj).get() == (Short) javaObj;
  } else if (obj instanceof DoubleWritable) {
    return ((DoubleWritable) obj).get() == (Double) javaObj;
  } else {
    throw new RuntimeException("Cannot recognize the writable type, please enrich the castHelper function");
  }
}
 
Example #13
Source File: RowValueDecoder.java    From Kylin with Apache License 2.0
private void convertToJavaObjects(Object[] mapredObjs, Object[] results) {
    for (int i = 0; i < mapredObjs.length; i++) {
        Object o = mapredObjs[i];

        if (o instanceof LongWritable)
            o = ((LongWritable) o).get();
        else if (o instanceof IntWritable)
            o = ((IntWritable) o).get();
        else if (o instanceof DoubleWritable)
            o = ((DoubleWritable) o).get();
        else if (o instanceof FloatWritable)
            o = ((FloatWritable) o).get();

        results[i] = o;
    }
}
 
Example #14
Source File: PageRankAlgorithm.java    From rheem with Apache License 2.0
@Override
public void preSuperstep() {
    if (getSuperstep() >= 3) {
        LOG.info("aggregatedNumVertices=" +
                getAggregatedValue(SUM_AGG) +
                " NumVertices=" + getTotalNumVertices());
        if (this.<LongWritable>getAggregatedValue(SUM_AGG).get() !=
                getTotalNumVertices()) {
            throw new RuntimeException("wrong value of SumAggreg: " +
                    getAggregatedValue(SUM_AGG) + ", should be: " +
                    getTotalNumVertices());
        }
        DoubleWritable maxPagerank = getAggregatedValue(MAX_AGG);
        LOG.info("aggregatedMaxPageRank=" + maxPagerank.get());
        DoubleWritable minPagerank = getAggregatedValue(MIN_AGG);
        LOG.info("aggregatedMinPageRank=" + minPagerank.get());
    }
}
 
Example #15
Source File: RawGroupbyBucket.java    From eagle with Apache License 2.0
/**
 * expensive operation - create objects and format the result
 * @return
 */
public List<GroupbyKeyValue> groupbyKeyValues() {
    List<GroupbyKeyValue> results = new ArrayList<GroupbyKeyValue>();
    for (Map.Entry<GroupbyKey, List<Function>> entry : this.group2FunctionMap.entrySet()) {
        GroupbyValue value = new GroupbyValue();
        for (Function f : entry.getValue()) {
            value.add(new DoubleWritable(f.result()));
            value.addMeta(f.count());
        }
        results.add(new GroupbyKeyValue(entry.getKey(),value));
    }
    return results;
}
 
Example #16
Source File: DoubleValueMapperTest.java    From secure-data-service with Apache License 2.0
@Test
public void testGetValue() {
    BSONObject field = new BasicBSONObject("field", 1.312D);
    BSONObject entry = new BasicBSONObject("double", field);
    BSONWritable entity = new BSONWritable(entry);

    DoubleValueMapper mapper = new DoubleValueMapper("double.field");

    Writable value = mapper.getValue(entity);
    assertFalse(value instanceof NullWritable);
    assertTrue(value instanceof DoubleWritable);
    assertEquals(((DoubleWritable) value).get(), 1.312D, 0.05);
}
 
Example #17
Source File: DGAEdgeTDTOutputFormatTest.java    From distributed-graph-analytics with Apache License 2.0
@Test
// Tests Src,Dest,VertexValue,EdgeValue
public void testWriteGraphWithEdgeValueAndVertexValue() throws Exception {
    TextEdgeWriter<Text, DoubleWritable, Text> writer = createEdgeWriter(rw);
    conf.set(WRITE_EDGE_VALUE, "true");
    conf.set(WRITE_VERTEX_VALUE, "true");
    writer.setConf(conf);
    when(tac.getConfiguration()).thenReturn(conf);
    writer.initialize(tac);
    writer.writeEdge(vertex.getId(), vertex.getValue(), edge1);
    verify(rw).write(new Text("34,12,10.43433333389,1"), null);
    writer.writeEdge(vertex.getId(), vertex.getValue(), edge2);
    verify(rw).write(new Text("34,6,10.43433333389,4"), null);

}
 
Example #18
Source File: DefaultHadoopTypeConverter.java    From stratosphere with Apache License 2.0
protected Value convert(Object hadoopType) {
	if(hadoopType instanceof org.apache.hadoop.io.LongWritable ) {
		return new LongValue(((LongWritable)hadoopType).get());
	}
	if(hadoopType instanceof org.apache.hadoop.io.Text) {
		return new StringValue(((Text)hadoopType).toString());
	}
	if(hadoopType instanceof org.apache.hadoop.io.IntWritable) {
		return new IntValue(((IntWritable)hadoopType).get());
	}
	if(hadoopType instanceof org.apache.hadoop.io.FloatWritable) {
		return new FloatValue(((FloatWritable)hadoopType).get());
	}
	if(hadoopType instanceof org.apache.hadoop.io.DoubleWritable) {
		return new DoubleValue(((DoubleWritable)hadoopType).get());
	}
	if(hadoopType instanceof org.apache.hadoop.io.BooleanWritable) {
		return new BooleanValue(((BooleanWritable)hadoopType).get());
	}
	if(hadoopType instanceof org.apache.hadoop.io.ByteWritable) {
		return new ByteValue(((ByteWritable)hadoopType).get());
	}
	if (hadoopType instanceof NullWritable) {
		return NullValue.getInstance();
	}
	
	throw new RuntimeException("Unable to convert Hadoop type ("+hadoopType.getClass().getCanonicalName()+") to Stratosphere.");
}
 
Example #19
Source File: CoprocessorITSuite.java    From eagle with Apache License 2.0
private void logGroupbyKeyValue(List<GroupbyKeyValue> keyValues) {
    for (GroupbyKeyValue keyValue : keyValues) {
        GroupbyKey key = keyValue.getKey();
        List<String> keys = new ArrayList<>();
        for (BytesWritable bytes : key.getValue()) {
            keys.add(new String(bytes.copyBytes()));
        }
        List<Double> vals = new ArrayList<>();
        GroupbyValue val = keyValue.getValue();
        for (DoubleWritable dw : val.getValue()) {
            vals.add(dw.get());
        }
        if (LOG.isDebugEnabled()) LOG.debug("KEY: " + keys + ", VALUE: " + vals);
    }
}
 
Example #20
Source File: JMatrixMultiplicationStep3.java    From RecommendationEngine with MIT License
public static void run() throws IOException, ClassNotFoundException,
		InterruptedException {
	String inputPath = ItemBasedCFDriver.path.get("step9InputPath");
	String outputPath = ItemBasedCFDriver.path.get("step9OutputPath");

	Configuration conf = new Configuration();
	conf.set("mapred.textoutputformat.separator", ",");

	Job job = Job.getInstance(conf);

	HDFS hdfs = new HDFS(conf);
	hdfs.rmr(outputPath);

	job.setMapperClass(Step3_Mapper.class);
	job.setReducerClass(Step3_Reducer.class);
	job.setCombinerClass(Step3_Reducer.class);
	job.setJarByClass(JMatrixMultiplicationStep3.class);
	job.setNumReduceTasks(ItemBasedCFDriver.ReducerNumber);

	job.setMapOutputKeyClass(Text.class);
	job.setMapOutputValueClass(DoubleWritable.class);
	job.setOutputKeyClass(Text.class);
	job.setOutputValueClass(DoubleWritable.class);

	job.setInputFormatClass(TextInputFormat.class);
	job.setOutputFormatClass(TextOutputFormat.class);

	FileInputFormat.setInputPaths(job, new Path(inputPath));
	FileOutputFormat.setOutputPath(job, new Path(outputPath));

	job.waitForCompletion(true);
}
 
Example #21
Source File: AvroMixedMapReduce.java    From hiped2 with Apache License 2.0
public void map(AvroWrapper<Stock> key,
                NullWritable value,
                OutputCollector<Text, DoubleWritable> output,
                Reporter reporter) throws IOException {
  output.collect(new Text(key.datum().getSymbol().toString()),
      new DoubleWritable(key.datum().getOpen()));
}
 
Example #22
Source File: MRStressTest.java    From incubator-gobblin with Apache License 2.0
@Override
protected void reduce(LongWritable key, Iterable<DoubleWritable> values, Context context)
    throws IOException, InterruptedException {
  double totalRate = 0;
  int activeMappers = 0;
  for (DoubleWritable value : values) {
    totalRate += value.get();
    activeMappers++;
  }
  context.write(key, new Text(String.format("%f\t%d", totalRate, activeMappers)));
}
 
Example #23
Source File: P1Q2.java    From IntroToHadoopAndMR__Udacity_Course with Apache License 2.0
public final void reduce(final Text key, final Iterable<DoubleWritable> values, final Context context)
		throws IOException, InterruptedException {
	double highestSale = 0.0;
	double currentSale;
	for (DoubleWritable val : values) {
		currentSale = val.get();
		if (highestSale < currentSale) {
			highestSale = currentSale;
		}
	}
	context.write(key, new DoubleWritable(highestSale));
}
 
Example #24
Source File: SequenceFileLoader.java    From spork with Apache License 2.0
protected byte inferPigDataType(Type t) {
  if (t == BytesWritable.class) return DataType.BYTEARRAY;
  else if (t == Text.class) return DataType.CHARARRAY;
  else if (t == IntWritable.class) return DataType.INTEGER;
  else if (t == LongWritable.class) return DataType.LONG;
  else if (t == FloatWritable.class) return DataType.FLOAT;
  else if (t == DoubleWritable.class) return DataType.DOUBLE;
  else if (t == BooleanWritable.class) return DataType.BOOLEAN;
  else if (t == ByteWritable.class) return DataType.BYTE;
  else if (t == DateTimeWritable.class) return DataType.DATETIME;
  // not doing maps or other complex types for now
  else return DataType.ERROR;
}
 
Example #25
Source File: AvroKeyValueMapReduce.java    From hiped2 with Apache License 2.0
@Override
protected void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
  Mean mean = new Mean();
  for (DoubleWritable val: values) {
    mean.increment(val.get());
  }
  StockAvg avg = new StockAvg();
  avg.setSymbol(key.toString());
  avg.setAvg(mean.getResult());
  context.write(key, new AvroValue<StockAvg>(avg));
}
 
Example #26
Source File: DGAEdgeTDTOutputFormatTest.java    From distributed-graph-analytics with Apache License 2.0
@Before
public void setUp() throws Exception {
    GiraphConfiguration giraphConfiguration = new GiraphConfiguration();
    conf = new ImmutableClassesGiraphConfiguration<Text, Text, Text>(giraphConfiguration);
    tac = mock(TaskAttemptContext.class);

    vertex = mock(Vertex.class);
    when(vertex.getId()).thenReturn(new Text("34"));
    when(vertex.getValue()).thenReturn(new DoubleWritable(10.43433333389));

    Iterable<Edge<Text, Text>> iterable = mock(Iterable.class);
    Iterator<Edge<Text, Text>> iterator = mock(Iterator.class);
    when(iterable.iterator()).thenReturn(iterator);

    edge1 = mock(Edge.class);
    when(edge1.getTargetVertexId()).thenReturn(new Text("12"));
    when(edge1.getValue()).thenReturn(new Text("1"));

    edge2 = mock(Edge.class);
    when(edge2.getTargetVertexId()).thenReturn(new Text("6"));
    when(edge2.getValue()).thenReturn(new Text("4"));

    rw = mock(RecordWriter.class);

    when(iterator.hasNext()).thenReturn(true, true, false);
    when(iterator.next()).thenReturn(edge1, edge2);

}
 
Example #27
Source File: P1Q3.java    From IntroToHadoopAndMR__Udacity_Course with Apache License 2.0
public final void reduce(final Text key, final Iterable<DoubleWritable> values, final Context context)
		throws IOException, InterruptedException {
	double sum = 0.0;
	int i = 0;
	for (final DoubleWritable val : values) {
		i++;
		sum += val.get();
	}
	context.write(new IntWritable(i), new DoubleWritable(sum));
}
 
Example #28
Source File: DoubleWritableToLongConverterTest.java    From pentaho-hadoop-shims with Apache License 2.0
@Test
public void convert() throws Exception {
  DoubleWritableToLongConverter c = new DoubleWritableToLongConverter();
  Long expected = 42L;

  assertEquals( expected, c.convert( null, new DoubleWritable( 42.42 ) ) );

  try {
    c.convert( null, null );
    fail();
  } catch ( NullPointerException ex ) {
    // Expected
  }
}
 
Example #29
Source File: P1Q2.java    From IntroToHadoopAndMR__Udacity_Course with Apache License 2.0
public final void map(final LongWritable key, final Text value, final Context context)
		throws IOException, InterruptedException {
	final String line = value.toString();
	final String[] data = line.trim().split("\t");
	if (data.length == 6) {
		final String product = data[2];
		final double sales = Double.parseDouble(data[4]);
		word.set(product);
		context.write(word, new DoubleWritable(sales));
	}
}
 
Example #30
Source File: TypedBytesWritableOutput.java    From big-c with Apache License 2.0
public void write(Writable w) throws IOException {
  if (w instanceof TypedBytesWritable) {
    writeTypedBytes((TypedBytesWritable) w);
  } else if (w instanceof BytesWritable) {
    writeBytes((BytesWritable) w);
  } else if (w instanceof ByteWritable) {
    writeByte((ByteWritable) w);
  } else if (w instanceof BooleanWritable) {
    writeBoolean((BooleanWritable) w);
  } else if (w instanceof IntWritable) {
    writeInt((IntWritable) w);
  } else if (w instanceof VIntWritable) {
    writeVInt((VIntWritable) w);
  } else if (w instanceof LongWritable) {
    writeLong((LongWritable) w);
  } else if (w instanceof VLongWritable) {
    writeVLong((VLongWritable) w);
  } else if (w instanceof FloatWritable) {
    writeFloat((FloatWritable) w);
  } else if (w instanceof DoubleWritable) {
    writeDouble((DoubleWritable) w);
  } else if (w instanceof Text) {
    writeText((Text) w);
  } else if (w instanceof ArrayWritable) {
    writeArray((ArrayWritable) w);
  } else if (w instanceof MapWritable) {
    writeMap((MapWritable) w);
  } else if (w instanceof SortedMapWritable) {
    writeSortedMap((SortedMapWritable) w);
  } else if (w instanceof Record) {
    writeRecord((Record) w);
  } else {
    writeWritable(w); // last resort
  }
}