Java Code Examples for org.apache.hadoop.io.DoubleWritable

The following examples show how to use org.apache.hadoop.io.DoubleWritable. They are extracted from open source projects; the project, source file, and license are noted above each example.
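
Before working through the project examples, here is a minimal, self-contained sketch of the core DoubleWritable API: wrapping a primitive double, reading it back with get(), and round-tripping it through write()/readFields() the way MapReduce serializes keys and values. The demo class name and the use of java.io byte streams are illustrative choices, not taken from any project below.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableDemo {
  public static void main(String[] args) throws IOException {
    // Wrap a primitive double in a Writable and read it back.
    DoubleWritable dw = new DoubleWritable(3.14);
    System.out.println(dw.get()); // 3.14

    // Round-trip through the Writable serialization used by MapReduce.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    dw.write(new DataOutputStream(bytes));

    DoubleWritable copy = new DoubleWritable();
    copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
    System.out.println(copy.get()); // 3.14

    // DoubleWritable is WritableComparable, so it can also serve as a sort key.
    System.out.println(dw.compareTo(new DoubleWritable(2.71)) > 0); // true
  }
}
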
Example 1
Source Project: pentaho-hadoop-shims   Source File: TypeConverterFactory.java    License: Apache License 2.0
/**
 * Determine the Hadoop Writable type to use when passing a Kettle type back to Hadoop.
 *
 * @param kettleType Kettle value metadata describing the field type
 * @return Writable class to convert {@code kettleType} to when sending data back to Hadoop.
 */
public static Class<? extends Writable> getWritableForKettleType( ValueMetaInterface kettleType ) {
  if ( kettleType == null ) {
    return NullWritable.class;
  }
  switch ( kettleType.getType() ) {
    case ValueMetaInterface.TYPE_STRING:
    case ValueMetaInterface.TYPE_BIGNUMBER:
    case ValueMetaInterface.TYPE_DATE:
      return Text.class;
    case ValueMetaInterface.TYPE_INTEGER:
      return LongWritable.class;
    case ValueMetaInterface.TYPE_NUMBER:
      return DoubleWritable.class;
    case ValueMetaInterface.TYPE_BOOLEAN:
      return BooleanWritable.class;
    case ValueMetaInterface.TYPE_BINARY:
      return BytesWritable.class;
    default:
      return Text.class;
  }
}
 
Example 2
@SuppressWarnings("rawtypes")
public static Class cloverType2Hadoop(DataFieldMetadata field) throws IOException{
	switch (field.getDataType()){
	case BOOLEAN:
		return BooleanWritable.class;
	case BYTE:
	case CBYTE:
		return BytesWritable.class;
	case DATE:
		return LongWritable.class;
	case INTEGER:
		return IntWritable.class;
	case LONG:
		return LongWritable.class;
	case NUMBER:
		return DoubleWritable.class;
	case STRING:
		return Text.class;
	default:
		throw new IOException(String.format("Unsupported CloverDX data type \"%s\" of field \"%s\" in conversion to Hadoop.",field.getDataType().getName(),field.getName()));
		
	}
}
 
Example 3
Source Project: spork   Source File: L12.java    License: Apache License 2.0
public void map(
        LongWritable k,
        Text val,
        OutputCollector<Text, DoubleWritable> oc,
        Reporter reporter) throws IOException {

    List<Text> fields = Library.splitLine(val, '\u0001'); // Ctrl-A field delimiter

    // Filter out null users or query terms.
    if (fields.get(0).getLength() == 0 ||
            fields.get(3).getLength() == 0) return;
    try {
        oc.collect(fields.get(0),
            new DoubleWritable(Double.valueOf(fields.get(6).toString())));
    } catch (NumberFormatException nfe) {
        oc.collect(fields.get(0), new DoubleWritable(0));
    }
}
 
Example 4
Source Project: incubator-gobblin   Source File: OrcTestTools.java    License: Apache License 2.0
/**
 * All Writable objects passed in here are guaranteed to be primitive writable objects.
 */
private boolean objCastHelper(Object javaObj, Writable obj) {
  if (obj instanceof IntWritable) {
    return ((IntWritable) obj).get() == (Integer) javaObj;
  } else if (obj instanceof Text) {
    return (obj).toString().equals(javaObj);
  } else if (obj instanceof LongWritable) {
    return ((LongWritable) obj).get() == (Long) javaObj;
  } else if (obj instanceof ShortWritable) {
    return ((ShortWritable) obj).get() == (Short) javaObj;
  } else if (obj instanceof DoubleWritable) {
    return ((DoubleWritable) obj).get() == (Double) javaObj;
  } else {
    throw new RuntimeException("Cannot recognize the writable type, please enrich the castHelper function");
  }
}
 
Example 5
Source Project: rheem   Source File: PageRankAlgorithm.java    License: Apache License 2.0
@Override
public void preSuperstep() {
    if (getSuperstep() >= 3) {
        LOG.info("aggregatedNumVertices=" +
                getAggregatedValue(SUM_AGG) +
                " NumVertices=" + getTotalNumVertices());
        if (this.<LongWritable>getAggregatedValue(SUM_AGG).get() !=
                getTotalNumVertices()) {
            throw new RuntimeException("wrong value of SumAggreg: " +
                    getAggregatedValue(SUM_AGG) + ", should be: " +
                    getTotalNumVertices());
        }
        DoubleWritable maxPagerank = getAggregatedValue(MAX_AGG);
        LOG.info("aggregatedMaxPageRank=" + maxPagerank.get());
        DoubleWritable minPagerank = getAggregatedValue(MIN_AGG);
        LOG.info("aggregatedMinPageRank=" + minPagerank.get());
    }
}
 
Example 6
Source Project: Eagle   Source File: GenericAggregateQuery.java    License: Apache License 2.0
protected Map<List<String>, List<Double>> keyValuesToMap(List<GroupbyKeyValue> entities) throws Exception {
	Map<List<String>, List<Double>> aggResultMap = new HashMap<List<String>, List<Double>>();
	try {
		for(GroupbyKeyValue keyValue:entities){
			List<String> key = new ArrayList<String>();
			for(BytesWritable bw:keyValue.getKey().getValue()){
				key.add(new String(bw.copyBytes(), QueryConstants.CHARSET));
			}
			List<Double> value = new ArrayList<Double>();
			for(DoubleWritable wa:keyValue.getValue().getValue()){
				value.add(wa.get());
			}
			aggResultMap.put(key, value);
		}
	} catch (UnsupportedEncodingException e) {
		LOG.error(QueryConstants.CHARSET +" not support: "+e.getMessage(),e);
	}
	return aggResultMap;
}
 
Example 7
Source Project: Kylin   Source File: RowValueDecoder.java    License: Apache License 2.0
private void convertToJavaObjects(Object[] mapredObjs, Object[] results) {
    for (int i = 0; i < mapredObjs.length; i++) {
        Object o = mapredObjs[i];

        if (o instanceof LongWritable)
            o = ((LongWritable) o).get();
        else if (o instanceof IntWritable)
            o = ((IntWritable) o).get();
        else if (o instanceof DoubleWritable)
            o = ((DoubleWritable) o).get();
        else if (o instanceof FloatWritable)
            o = ((FloatWritable) o).get();

        results[i] = o;
    }
}
 
Example 8
Source Project: IntroToHadoopAndMR__Udacity_Course   Source File: P1.java    License: Apache License 2.0
public final static void main(final String[] args) throws Exception {
	final Configuration conf = new Configuration();

	final Job job = new Job(conf, "P1");
	job.setJarByClass(P1.class);

	job.setOutputKeyClass(Text.class);
	job.setOutputValueClass(DoubleWritable.class);

	job.setMapperClass(P1Map.class);
	job.setCombinerClass(P1Reduce.class);
	job.setReducerClass(P1Reduce.class);

	job.setInputFormatClass(TextInputFormat.class);
	job.setOutputFormatClass(TextOutputFormat.class);

	FileInputFormat.addInputPath(job, new Path(args[0]));
	FileOutputFormat.setOutputPath(job, new Path(args[1]));

	job.waitForCompletion(true);
}
 
Example 9
@Override
public void compute(Vertex<Text, DoubleWritable, Text> vertex, Iterable<DoubleWritable> messages) throws IOException {

    float dampingFactor = this.getConf().getFloat(DAMPING_FACTOR, DAMPING_FACTOR_DEFAULT_VALUE);

    long step = getSuperstep();

    if (step == 0) {
        //set initial value
        logger.debug("Superstep is 0: Setting the default value.");
        vertex.setValue(new DoubleWritable(1.0 / getTotalNumVertices()));
    } else { // go until no one votes to continue

        double rank = 0;
        for (DoubleWritable partial : messages) {
            rank += partial.get();
        }
        rank = ((1 - dampingFactor) / getTotalNumVertices()) + (dampingFactor * rank);
        double vertexValue = vertex.getValue().get();
        double delta = Math.abs(rank - vertexValue) / vertexValue;
        aggregate(MAX_EPSILON, new DoubleWritable(delta));
        vertex.setValue(new DoubleWritable(rank));
        logger.debug("{} is calculated {} for a PageRank.", vertex.getId(), rank);
    }
    distributeRank(vertex);
}
 
Example 10
Source Project: hiped2   Source File: AvroMixedMapReduce.java    License: Apache License 2.0
public void reduce(Text key,
                   Iterator<DoubleWritable> values,
                   OutputCollector<AvroWrapper<StockAvg>,
                       NullWritable> output,
                   Reporter reporter) throws IOException {

  Mean mean = new Mean();
  while (values.hasNext()) {
    mean.increment(values.next().get());
  }
  StockAvg avg = new StockAvg();
  avg.setSymbol(key.toString());
  avg.setAvg(mean.getResult());
  output.collect(new AvroWrapper<StockAvg>(avg),
      NullWritable.get());
}
 
Example 11
Source Project: eagle   Source File: GenericCoprocessorAggregateQuery.java    License: Apache License 2.0
protected Map<List<String>, List<Double>> keyValuesToMap(List<GroupbyKeyValue> entities) throws Exception {
    Map<List<String>, List<Double>> aggResultMap = new HashMap<List<String>, List<Double>>();
    try {
        for (GroupbyKeyValue keyValue : entities) {
            List<String> key = new ArrayList<String>();
            for (BytesWritable bw : keyValue.getKey().getValue()) {
                key.add(new String(bw.copyBytes(), QueryConstants.CHARSET));
            }
            List<Double> value = new ArrayList<Double>();
            for (DoubleWritable wa : keyValue.getValue().getValue()) {
                value.add(wa.get());
            }
            aggResultMap.put(key, value);
        }
    } catch (UnsupportedEncodingException e) {
        LOG.error(QueryConstants.CHARSET + " not support: " + e.getMessage(), e);
    }
    return aggResultMap;
}
 
Example 12
Source Project: incubator-gobblin   Source File: MRStressTest.java    License: Apache License 2.0
@Override
public void run() {
  DescriptiveStatistics stats = this.limiter.getRateStatsSinceLastReport();
  long now = System.currentTimeMillis();
  this.runs++;

  if (stats != null) {
    long key;
    if (this.relativeKey) {
      key = 15 * this.runs;
    } else {
      DateTime nowTime = new DateTime(now).withMillisOfSecond(0);
      DateTime rounded = nowTime.withSecondOfMinute(15 * (nowTime.getSecondOfMinute() / 15));
      key = rounded.getMillis() / 1000;
    }


    try {
      this.context.write(new LongWritable(key), new DoubleWritable(stats.getSum()));
    } catch (IOException | InterruptedException ioe) {
      log.error("Error: ", ioe);
    }
  }

}
 
Example 13
@SuppressWarnings("unchecked")
private<T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
	if(hadoopType == LongWritable.class ) {
		return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.Text.class) {
		return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.IntWritable.class) {
		return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.FloatWritable.class) {
		return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
		return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
		return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
	}
	if(hadoopType == org.apache.hadoop.io.ByteWritable.class) {
		return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
	}

	throw new RuntimeException("Unable to convert Stratosphere type ("+stratosphereType.getClass().getCanonicalName()+") to Hadoop.");
}
 
Example 14
Source Project: distributed-graph-analytics   Source File: PageRankTest.java    License: Apache License 2.0
@Test
public void testHighPageRankForOneNode() throws Exception {
    GiraphConfiguration conf = getConf();
    TestGraph<Text, DoubleWritable, Text> input = getHighPageRankGraph(conf);
    InMemoryVertexOutputFormat.initializeOutputGraph(conf);
    InternalVertexRunner.run(conf, input);
    TestGraph<Text, DoubleWritable, Text> output = InMemoryVertexOutputFormat.getOutputGraph();
    assertEquals(8, output.getVertices().size());
    assertTrue(output.getVertex(new Text("8")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("2")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("3")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("4")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("5")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("6")).getValue().get() < output.getVertex(new Text("1")).getValue().get());
    assertTrue(output.getVertex(new Text("7")).getValue().get() < output.getVertex(new Text("1")).getValue().get());

}
 
Example 15
public TextEdgeWriter<Text, DoubleWritable, Text> createEdgeWriter(final RecordWriter<Text, Text> rw) {
    return new TDTEdgeWriter() {
        @Override
        protected RecordWriter<Text, Text> createLineRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
            return rw;
        }
    };
}
 
Example 16
private void aggregateQ(Double q) {
    // int aggregators = getNumQAggregators(getConf());
    // if (0 < aggregators) {
    //     int modId = (int) this.getId().get() % aggregators;
    aggregate(ACTUAL_Q_AGG, new DoubleWritable(q));
    // }
}
 
Example 17
Source Project: pxf   Source File: RecordkeyAdapterTest.java    License: Apache License 2.0
/**
 * Test convertKeyValue for Double type
 */
@Test
public void convertKeyValueDouble() {
    double key = 2.3;
    initRecordkeyAdapter();
    runConvertKeyValue(key, new DoubleWritable(key));
}
 
Example 18
Source Project: WIFIProbe   Source File: CustomerFlowElement.java    License: Apache License 2.0
public void write(DataOutput dataOutput) throws IOException {
    Text text = new Text(wifiProb==null?"":wifiProb);
    text.write(dataOutput);

    IntWritable intWritable = new IntWritable();

    intWritable.set(inNoOutWifi);
    intWritable.write(dataOutput);
    intWritable.set(inNoOutStore);
    intWritable.write(dataOutput);

    intWritable.set(outNoInWifi);
    intWritable.write(dataOutput);
    intWritable.set(outNoInStore);
    intWritable.write(dataOutput);

    intWritable.set(inAndOutWifi);
    intWritable.write(dataOutput);
    intWritable.set(inAndOutStore);
    intWritable.write(dataOutput);

    intWritable.set(stayInWifi);
    intWritable.write(dataOutput);
    intWritable.set(stayInStore);
    intWritable.write(dataOutput);

    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.set(jumpRate);
    doubleWritable.write(dataOutput);
    doubleWritable.set(deepVisit);
    doubleWritable.write(dataOutput);
    doubleWritable.set(inStoreRate);
    doubleWritable.write(dataOutput);
}
 
Example 19
Source Project: WIFIProbe   Source File: CustomerFlowElement.java    License: Apache License 2.0
public void readFields(DataInput dataInput) throws IOException {
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();

    IntWritable intReader = new IntWritable();

    intReader.readFields(dataInput);
    inNoOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inNoOutStore = intReader.get();

    intReader.readFields(dataInput);
    outNoInWifi = intReader.get();
    intReader.readFields(dataInput);
    outNoInStore = intReader.get();


    intReader.readFields(dataInput);
    inAndOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inAndOutStore = intReader.get();

    intReader.readFields(dataInput);
    stayInWifi = intReader.get();
    intReader.readFields(dataInput);
    stayInStore = intReader.get();


    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput);
    jumpRate = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    deepVisit = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    inStoreRate = doubleWritable.get();

}
 
Example 20
Source Project: hadoop   Source File: TypedBytesWritableInput.java    License: Apache License 2.0
public Class<? extends Writable> readType() throws IOException {
  Type type = in.readType();
  if (type == null) {
    return null;
  }
  switch (type) {
  case BYTES:
    return BytesWritable.class;
  case BYTE:
    return ByteWritable.class;
  case BOOL:
    return BooleanWritable.class;
  case INT:
    return VIntWritable.class;
  case LONG:
    return VLongWritable.class;
  case FLOAT:
    return FloatWritable.class;
  case DOUBLE:
    return DoubleWritable.class;
  case STRING:
    return Text.class;
  case VECTOR:
    return ArrayWritable.class;
  case MAP:
    return MapWritable.class;
  case WRITABLE:
    return Writable.class;
  default:
    throw new RuntimeException("unknown type");
  }
}
 
Example 21
Source Project: incubator-gobblin   Source File: MRStressTest.java    License: Apache License 2.0
public static void main(String[] args) throws Exception {

    CommandLine cli = StressTestUtils.parseCommandLine(OPTIONS, args);

    Configuration configuration = new Configuration();
    if (cli.hasOption(THROTTLING_SERVER_URI.getOpt())) {
      configuration.setBoolean(USE_THROTTLING_SERVER, true);
      String resourceLimited = cli.getOptionValue(RESOURCE_ID_OPT.getOpt(), "MRStressTest");
      configuration.set(RESOURCE_ID, resourceLimited);
      configuration.set(
          BrokerConfigurationKeyGenerator.generateKey(new SharedRestClientFactory(),
              new SharedRestClientKey(RestliLimiterFactory.RESTLI_SERVICE_NAME),
              null, SharedRestClientFactory.SERVER_URI_KEY), cli.getOptionValue(THROTTLING_SERVER_URI.getOpt()));
    }

    if (cli.hasOption(LOCAL_QPS_OPT.getOpt())) {
      configuration.set(LOCALLY_ENFORCED_QPS, cli.getOptionValue(LOCAL_QPS_OPT.getOpt()));
    }

    Job job = Job.getInstance(configuration, "ThrottlingStressTest");
    job.getConfiguration().setBoolean("mapreduce.job.user.classpath.first", true);
    job.getConfiguration().setBoolean("mapreduce.map.speculative", false);

    job.getConfiguration().set(NUM_MAPPERS, cli.getOptionValue(NUM_MAPPERS_OPT.getOpt(), DEFAULT_MAPPERS));
    StressTestUtils.populateConfigFromCli(job.getConfiguration(), cli);

    job.setJarByClass(MRStressTest.class);
    job.setMapperClass(StresserMapper.class);
    job.setReducerClass(AggregatorReducer.class);
    job.setInputFormatClass(MyInputFormat.class);

    job.setOutputKeyClass(LongWritable.class);
    job.setOutputValueClass(DoubleWritable.class);
    FileOutputFormat.setOutputPath(job, new Path("/tmp/MRStressTest" + System.currentTimeMillis()));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
 
Example 22
Source Project: hiped2   Source File: AvroParquetMapReduce.java    License: Apache License 2.0
@Override
public void map(Void key,
                Stock value,
                Context context) throws IOException, InterruptedException {
  context.write(new Text(value.getSymbol().toString()),
      new DoubleWritable(value.getOpen()));
}
 
Example 23
Source Project: hive-third-functions   Source File: UDFMathIsInfinite.java    License: Apache License 2.0
public BooleanWritable evaluate(DoubleWritable num) {
    if (num == null) {
        result.set(false);
    } else {
        result.set(isInfinite(num.get()));
    }
    return result;
}
 
Example 24
public DoubleWritable evaluate(double mean, double sd, double p) throws HiveException {
    checkCondition(p > 0 && p < 1, "p must satisfy 0 < p < 1");
    checkCondition(sd > 0, "sd must be > 0");

    // Inverse of the normal CDF: mean + sd * sqrt(2) * erfInv(2p - 1)
    result.set(mean + sd * 1.4142135623730951 * Erf.erfInv(2 * p - 1));
    return result;
}
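
For context, the constant 1.4142135623730951 above is sqrt(2), so the expression computes the normal quantile mean + sd * sqrt(2) * erfInv(2p - 1). Below is a standalone cross-check of that identity, assuming Erf comes from Apache Commons Math 3 (an assumption, since the snippet does not show its imports); NormalDistribution is used only as a reference implementation.

import org.apache.commons.math3.distribution.NormalDistribution;
import org.apache.commons.math3.special.Erf;

public class InverseNormalCheck {
  public static void main(String[] args) {
    double mean = 0.0, sd = 1.0, p = 0.975;
    // Quantile via the erf-inverse identity used in the evaluate() method above.
    double viaErf = mean + sd * Math.sqrt(2) * Erf.erfInv(2 * p - 1);
    // Commons Math's own quantile implementation as a reference value.
    double viaDist = new NormalDistribution(mean, sd).inverseCumulativeProbability(p);
    System.out.println(viaErf + " ~ " + viaDist); // both approximately 1.96
  }
}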
 
Example 25
Source Project: Kylin   Source File: DoubleSerializer.java    License: Apache License 2.0
@Override
public DoubleWritable valueOf(byte[] value) {
    if (value == null)
        current.set(0d);
    else
        current.set(Double.parseDouble(Bytes.toString(value)));
    return current;
}
 
Example 26
Source Project: hive-third-functions   Source File: UDFMathIsNaN.java    License: Apache License 2.0
public BooleanWritable evaluate(DoubleWritable num) {
    if (num == null) {
        result.set(false);
    } else {
        result.set(isNaN(num.get()));
    }
    return result;
}
 
Example 27
Source Project: IntroToHadoopAndMR__Udacity_Course   Source File: P1Q1.java    License: Apache License 2.0
public final void map(final LongWritable key, final Text value, final Context context)
		throws IOException, InterruptedException {
	final String line = value.toString();
	final String[] data = line.trim().split("\t");
	if (data.length == 6) {
		final String product = data[3];
		final double sales = Double.parseDouble(data[4]);
		word.set(product);
		context.write(word, new DoubleWritable(sales));
	}
}
 
Example 28
Source Project: marklogic-contentpump   Source File: InternalUtilities.java    License: Apache License 2.0
/**
 * Create a new XdmValue from a value type and a Writable value.
 */
public static XdmValue newValue(ValueType valueType, Object value) {
    if (value instanceof Text) {
        return ValueFactory.newValue(valueType, ((Text)value).toString());
    } else if (value instanceof BytesWritable) {
        return ValueFactory.newValue(valueType, ((BytesWritable)value).getBytes());
    } else if (value instanceof IntWritable) {
        return ValueFactory.newValue(valueType, ((IntWritable)value).get());
    } else if (value instanceof LongWritable) {
        return ValueFactory.newValue(valueType, ((LongWritable)value).get());
    } else if (value instanceof VIntWritable) {
        return ValueFactory.newValue(valueType, ((VIntWritable)value).get());
    } else if (value instanceof VLongWritable) {
        return ValueFactory.newValue(valueType, ((VLongWritable)value).get());
    } else if (value instanceof BooleanWritable) {
        return ValueFactory.newValue(valueType, ((BooleanWritable)value).get());
    } else if (value instanceof FloatWritable) {
        return ValueFactory.newValue(valueType, ((FloatWritable)value).get());
    } else if (value instanceof DoubleWritable) {
        return ValueFactory.newValue(valueType, ((DoubleWritable)value).get());
    } else if (value instanceof MarkLogicNode) {
        return ValueFactory.newValue(valueType, ((MarkLogicNode)value).get());
    } else {
        throw new UnsupportedOperationException("Value " +  
                value.getClass().getName() + " is unsupported.");
    }
}
 
Example 29
Source Project: big-c   Source File: TypedBytesWritableOutput.java    License: Apache License 2.0
public void write(Writable w) throws IOException {
  if (w instanceof TypedBytesWritable) {
    writeTypedBytes((TypedBytesWritable) w);
  } else if (w instanceof BytesWritable) {
    writeBytes((BytesWritable) w);
  } else if (w instanceof ByteWritable) {
    writeByte((ByteWritable) w);
  } else if (w instanceof BooleanWritable) {
    writeBoolean((BooleanWritable) w);
  } else if (w instanceof IntWritable) {
    writeInt((IntWritable) w);
  } else if (w instanceof VIntWritable) {
    writeVInt((VIntWritable) w);
  } else if (w instanceof LongWritable) {
    writeLong((LongWritable) w);
  } else if (w instanceof VLongWritable) {
    writeVLong((VLongWritable) w);
  } else if (w instanceof FloatWritable) {
    writeFloat((FloatWritable) w);
  } else if (w instanceof DoubleWritable) {
    writeDouble((DoubleWritable) w);
  } else if (w instanceof Text) {
    writeText((Text) w);
  } else if (w instanceof ArrayWritable) {
    writeArray((ArrayWritable) w);
  } else if (w instanceof MapWritable) {
    writeMap((MapWritable) w);
  } else if (w instanceof SortedMapWritable) {
    writeSortedMap((SortedMapWritable) w);
  } else if (w instanceof Record) {
    writeRecord((Record) w);
  } else {
    writeWritable(w); // last resort
  }
}
 
Example 30
Source Project: IntroToHadoopAndMR__Udacity_Course   Source File: P1Q2.java    License: Apache License 2.0
public final void map(final LongWritable key, final Text value, final Context context)
		throws IOException, InterruptedException {
	final String line = value.toString();
	final String[] data = line.trim().split("\t");
	if (data.length == 6) {
		final String product = data[2];
		final double sales = Double.parseDouble(data[4]);
		word.set(product);
		context.write(word, new DoubleWritable(sales));
	}
}