Java Code Examples for org.datavec.api.transform.ColumnType#Long

The following examples show how to use org.datavec.api.transform.ColumnType#Long. They are taken from open source projects; the source file and license are listed above each example.
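Before the extracted examples, here is a minimal, self-contained sketch of where ColumnType.Long typically shows up. The class name ColumnTypeLongDemo and the column name "timestamp" are illustrative only and do not come from the projects below; the point is that a schema column created with addColumnLong(...) reports ColumnType.Long through its metadata, which is exactly what the getColumnType() overrides in Examples 1-3 and 5-7 return.

import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.metadata.ColumnMetaData;
import org.datavec.api.transform.schema.Schema;

public class ColumnTypeLongDemo {
    public static void main(String[] args) {
        // Build a schema with a single long column; "timestamp" is an illustrative name.
        Schema schema = new Schema.Builder()
                .addColumnLong("timestamp")
                .build();

        // The column's metadata reports ColumnType.Long, matching the
        // getColumnType() overrides shown in the examples below.
        ColumnMetaData meta = schema.getMetaData("timestamp");
        System.out.println(meta.getColumnType());                     // Long
        System.out.println(schema.getType(0) == ColumnType.Long);     // true
    }
}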
Example 1
Source File: LongMetaData.java    From DataVec with Apache License 2.0
@Override
public ColumnType getColumnType() {
    return ColumnType.Long;
}
 
Example 2
Source File: TimeAnalysis.java    From DataVec with Apache License 2.0
@Override
public ColumnType getColumnType() {
    return ColumnType.Long;
}
 
Example 3
Source File: LongAnalysis.java    From DataVec with Apache License 2.0
@Override
public ColumnType getColumnType() {
    return ColumnType.Long;
}
 
Example 4
Source File: TestMultiOpReduce.java    From DataVec with Apache License 2.0
@Test
public void testCustomReductionsWithCondition() {

    List<List<Writable>> inputs = new ArrayList<>();
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(1), new Text("zero"),
            new DoubleWritable(0)));
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2), new Text("one"),
            new DoubleWritable(1)));
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(3), new Text("two"),
            new DoubleWritable(2)));
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(4), new Text("three"),
            new DoubleWritable(3)));

    List<Writable> expected = Arrays.asList((Writable) new Text("someKey"), new IntWritable(10), new IntWritable(3),
            new DoubleWritable(1));


    Schema schema = new Schema.Builder().addColumnString("key").addColumnInteger("intCol")
            .addColumnString("textCol").addColumnString("doubleCol").build();

    Reducer reducer = new Reducer.Builder(ReduceOp.Sum).keyColumns("key")
            .conditionalReduction("textCol", "condTextCol",
                    ReduceOp.Count, new StringColumnCondition("textCol", ConditionOp.NotEqual, "three"))
            .customReduction("doubleCol", new CustomReduceTakeSecond()).build();

    reducer.setInputSchema(schema);


    IAggregableReduceOp<List<Writable>, List<Writable>> accumulator = reducer.aggregableReducer();

    for (int i = 0; i < inputs.size(); i++) {
        accumulator.accept(inputs.get(i));
    }
    List<Writable> out = accumulator.get();

    assertEquals(4, out.size());
    assertEquals(expected, out);

    //Check schema:
    String[] expNames = new String[] {"key", "sum(intCol)", "condTextCol", "myCustomReduce(doubleCol)"};
    ColumnType[] expTypes =
            new ColumnType[] {ColumnType.String, ColumnType.Integer, ColumnType.Long, ColumnType.String};
    Schema outSchema = reducer.transform(schema);

    assertEquals(4, outSchema.numColumns());
    for (int i = 0; i < 4; i++) {
        assertEquals(expNames[i], outSchema.getName(i));
        assertEquals(expTypes[i], outSchema.getType(i));
    }
}
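Note that the conditional reduction is what makes this test relevant here: a Count reduction over the String column textCol yields an output column condTextCol of type ColumnType.Long, as the expTypes assertion shows. The following is a minimal sketch of just that schema transformation, assuming the standard DataVec package locations for Reducer, ReduceOp, ConditionOp, and StringColumnCondition; the class name and the TakeFirst default op are illustrative choices, not part of the original test.

import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.ReduceOp;
import org.datavec.api.transform.condition.ConditionOp;
import org.datavec.api.transform.condition.column.StringColumnCondition;
import org.datavec.api.transform.reduce.Reducer;
import org.datavec.api.transform.schema.Schema;

public class ConditionalCountSchemaSketch {
    public static void main(String[] args) {
        // Input schema: a key column plus the String column the condition inspects.
        Schema in = new Schema.Builder()
                .addColumnString("key")
                .addColumnString("textCol")
                .build();

        // Count the textCol values that are not equal to "three",
        // writing the result into a new column named condTextCol.
        Reducer reducer = new Reducer.Builder(ReduceOp.TakeFirst).keyColumns("key")
                .conditionalReduction("textCol", "condTextCol", ReduceOp.Count,
                        new StringColumnCondition("textCol", ConditionOp.NotEqual, "three"))
                .build();
        reducer.setInputSchema(in);

        // The count column comes out long-typed in the transformed schema.
        Schema out = reducer.transform(in);
        int idx = out.getIndexOfColumn("condTextCol");
        System.out.println(out.getType(idx));                        // Long
        System.out.println(out.getType(idx) == ColumnType.Long);     // true
    }
}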
 
Example 5
Source File: LongMetaData.java    From deeplearning4j with Apache License 2.0
@Override
public ColumnType getColumnType() {
    return ColumnType.Long;
}
 
Example 6
Source File: TimeAnalysis.java    From deeplearning4j with Apache License 2.0
@Override
public ColumnType getColumnType() {
    return ColumnType.Long;
}
 
Example 7
Source File: LongAnalysis.java    From deeplearning4j with Apache License 2.0
@Override
public ColumnType getColumnType() {
    return ColumnType.Long;
}
 
Example 8
Source File: TestMultiOpReduce.java    From deeplearning4j with Apache License 2.0
@Test
public void testCustomReductionsWithCondition() {

    List<List<Writable>> inputs = new ArrayList<>();
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(1), new Text("zero"),
            new DoubleWritable(0)));
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(2), new Text("one"),
            new DoubleWritable(1)));
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(3), new Text("two"),
            new DoubleWritable(2)));
    inputs.add(Arrays.asList((Writable) new Text("someKey"), new IntWritable(4), new Text("three"),
            new DoubleWritable(3)));

    List<Writable> expected = Arrays.asList((Writable) new Text("someKey"), new IntWritable(10), new IntWritable(3),
            new DoubleWritable(1));


    Schema schema = new Schema.Builder().addColumnString("key").addColumnInteger("intCol")
            .addColumnString("textCol").addColumnString("doubleCol").build();

    Reducer reducer = new Reducer.Builder(ReduceOp.Sum).keyColumns("key")
            .conditionalReduction("textCol", "condTextCol",
                    ReduceOp.Count, new StringColumnCondition("textCol", ConditionOp.NotEqual, "three"))
            .customReduction("doubleCol", new CustomReduceTakeSecond()).build();

    reducer.setInputSchema(schema);


    IAggregableReduceOp<List<Writable>, List<Writable>> accumulator = reducer.aggregableReducer();

    for (int i = 0; i < inputs.size(); i++) {
        accumulator.accept(inputs.get(i));
    }
    List<Writable> out = accumulator.get();

    assertEquals(4, out.size());
    assertEquals(expected, out);

    //Check schema:
    String[] expNames = new String[] {"key", "sum(intCol)", "condTextCol", "myCustomReduce(doubleCol)"};
    ColumnType[] expTypes =
            new ColumnType[] {ColumnType.String, ColumnType.Integer, ColumnType.Long, ColumnType.String};
    Schema outSchema = reducer.transform(schema);

    assertEquals(4, outSchema.numColumns());
    for (int i = 0; i < 4; i++) {
        assertEquals(expNames[i], outSchema.getName(i));
        assertEquals(expTypes[i], outSchema.getType(i));
    }
}