Java Code Examples for org.apache.flink.types.Row#getArity()

The following examples show how to use org.apache.flink.types.Row#getArity(). Each example is taken from an open source project; the source file, project, and license are noted above the code.
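As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) showing what getArity() reports for a Row and how it typically drives a loop over the fields:

import org.apache.flink.types.Row;

public class RowArityDemo {
	public static void main(String[] args) {
		// Row.of creates a row with one field per argument.
		Row row = Row.of("Alice", 42, 3.14);

		// getArity() returns the number of fields in the row; here it is 3.
		int arity = row.getArity();

		// The arity is typically used as the loop bound when iterating over the fields.
		for (int i = 0; i < arity; i++) {
			System.out.println("field " + i + " = " + row.getField(i));
		}
	}
}
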
Example 1
Source File: JDBCInputFormat.java    From Flink-CEPplus with Apache License 2.0
/**
 * Stores the next resultSet row in the reusable {@link Row}.
 *
 * @param row row to be reused.
 * @return the row containing the next record, or null if no more records are available.
 * @throws java.io.IOException if the next record could not be read.
 */
@Override
public Row nextRecord(Row row) throws IOException {
	try {
		if (!hasNext) {
			return null;
		}
		for (int pos = 0; pos < row.getArity(); pos++) {
			row.setField(pos, resultSet.getObject(pos + 1));
		}
		//update hasNext after we've read the record
		hasNext = resultSet.next();
		return row;
	} catch (SQLException se) {
		throw new IOException("Couldn't read data - " + se.getMessage(), se);
	} catch (NullPointerException npe) {
		throw new IOException("Couldn't access resultSet", npe);
	}
}
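
The reusable row handed to nextRecord(Row) is normally sized from the query's row type before the read loop starts. A minimal, hypothetical read loop (the inputFormat and rowTypeInfo variables are assumptions, standing for an already configured and opened JDBCInputFormat and its matching RowTypeInfo):

// inputFormat and rowTypeInfo are assumed to be set up and opened elsewhere.
Row reuse = new Row(rowTypeInfo.getArity());
Row record;
while ((record = inputFormat.nextRecord(reuse)) != null) {
	// Process the record, e.g. print every field.
	for (int i = 0; i < record.getArity(); i++) {
		System.out.println(record.getField(i));
	}
}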
 
Example 2
Source File: RowSerializer.java    From flink with Apache License 2.0
@Override
public void serialize(Row record, DataOutputView target) throws IOException {
	final int len = fieldSerializers.length;

	if (record.getArity() != len) {
		throw new RuntimeException("Row arity of from does not match serializers.");
	}

	// write bitmask
	fillMask(len, record, mask, legacyModeEnabled, legacyOffset);
	writeMask(mask, target);

	// serialize non-null fields
	for (int fieldPos = 0; fieldPos < len; fieldPos++) {
		final Object o = record.getField(fieldPos);
		if (o != null) {
			fieldSerializers[fieldPos].serialize(o, target);
		}
	}
}
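
A hypothetical round trip through the serializer, sketched under the assumption that a RowSerializer named serializer exists and that its field serializers match a (String, Integer) row; DataOutputSerializer and DataInputDeserializer are Flink's in-memory DataOutputView/DataInputView implementations:

// Inside a method that declares throws IOException.
DataOutputSerializer out = new DataOutputSerializer(64);
serializer.serialize(Row.of("a", 1), out);

DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
Row copy = serializer.deserialize(in);
// copy.getArity() equals the arity of the original row.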
 
Example 3
Source File: TestCsvFileSystemFormatFactory.java    From flink with Apache License 2.0
private static void writeCsvToStream(
		DataType[] types,
		RowData rowData,
		OutputStream stream) throws IOException {
	LogicalType[] fieldTypes = Arrays.stream(types)
			.map(DataType::getLogicalType)
			.toArray(LogicalType[]::new);
	DataFormatConverters.DataFormatConverter converter = DataFormatConverters.getConverterForDataType(
			TypeConversions.fromLogicalToDataType(RowType.of(fieldTypes)));

	Row row = (Row) converter.toExternal(rowData);
	StringBuilder builder = new StringBuilder();
	Object o;
	for (int i = 0; i < row.getArity(); i++) {
		if (i > 0) {
			builder.append(DEFAULT_FIELD_DELIMITER);
		}
		if ((o = row.getField(i)) != null) {
			builder.append(o);
		}
	}
	String str = builder.toString();
	stream.write(str.getBytes(StandardCharsets.UTF_8));
	stream.write(DEFAULT_LINE_DELIMITER.getBytes(StandardCharsets.UTF_8));
}
 
Example 4
Source File: HBaseOutputFormat.java    From alchemy with Apache License 2.0
@Override
public void writeRecord(Tuple2<Boolean, Row> value) throws IOException {
    if (value == null || value.f1 == null) {
        return;
    }
    if (!value.f0) {
        return;
    }
    Row input = value.f1;
    String rowKey = createRowKey(input);
    Put put = new Put(Bytes.toBytes(rowKey));
    final String[] columnNames = this.fieldNames;
    for (int i = 0; i < input.getArity(); i++) {
        String family = findFamily(i);
        if (family == null) {
            continue;
        }
        put.addColumn(Bytes.toBytes(family), Bytes.toBytes(columnNames[i]),
            Bytes.toBytes(getValue(input.getField(i))));
    }
    if (this.hbaseProperties.isSkipWal()) {
        put.setDurability(Durability.SKIP_WAL);
    }
    table.put(put);
}
 
Example 5
Source File: MLEnvironment.java    From Alink with Apache License 2.0
/**
 * Factory to create batch {@link Table}.
 * <p>
 * The batch table is created with the session's shared ExecutionEnvironment.
 *
 * @param rows     list of rows from which to create the table.
 * @param colNames the column names of the table.
 * @return the created batch table.
 * @see MLEnvironment#getExecutionEnvironment()
 * @see MLEnvironment#getBatchTableEnvironment()
 */
public Table createBatchTable(List<Row> rows, String[] colNames) {
    if (rows == null || rows.size() < 1) {
        throw new IllegalArgumentException("Values can not be empty.");
    }

    Row first = rows.iterator().next();
    int arity = first.getArity();

    TypeInformation<?>[] types = new TypeInformation[arity];

    for (int i = 0; i < arity; ++i) {
        types[i] = TypeExtractor.getForObject(first.getField(i));
    }

    DataSet<Row> dataSet = getExecutionEnvironment().fromCollection(rows);
    return DataSetConversionUtil.toTable(this, dataSet, colNames, types);
}
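
A hypothetical call to this factory, assuming an MLEnvironment instance named env; the column names and row values are made up for illustration:

// Assumes: import java.util.Arrays; import java.util.List;
//          import org.apache.flink.table.api.Table; import org.apache.flink.types.Row;
List<Row> rows = Arrays.asList(
    Row.of("Alice", 1.0),
    Row.of("Bob", 2.0));
Table table = env.createBatchTable(rows, new String[]{"name", "score"});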
 
Example 6
Source File: ConvertRowUtil.java    From alchemy with Apache License 2.0
public static void convertFromRow(Object object, String[] fieldNames, Row row) {
    Class<?> clazz = object.getClass();
    // validate the row
    if (row.getArity() != fieldNames.length) {
        throw new IllegalStateException(
                String.format("Number of elements in the row '%s' is different from number of field names: %d", row,
                        fieldNames.length));
    }

    for (int i = 0; i < fieldNames.length; i++) {
        if (row.getField(i) == null) {
            continue;
        }
        final String name = fieldNames[i];
        try {
            Field field = clazz.getDeclaredField(name);
            field.setAccessible(true);
            field.set(object, row.getField(i));
        } catch (Exception e) {
            logger.error("Occur Error when convert from Row",e);
        }

    }
}
 
Example 7
Source File: JDBCInputFormat.java    From flink with Apache License 2.0
/**
 * Stores the next resultSet row in the reusable {@link Row}.
 *
 * @param row row to be reused.
 * @return the row containing the next record, or null if no more records are available.
 * @throws java.io.IOException if the next record could not be read.
 */
@Override
public Row nextRecord(Row row) throws IOException {
	try {
		if (!hasNext) {
			return null;
		}
		for (int pos = 0; pos < row.getArity(); pos++) {
			row.setField(pos, resultSet.getObject(pos + 1));
		}
		//update hasNext after we've read the record
		hasNext = resultSet.next();
		return row;
	} catch (SQLException se) {
		throw new IOException("Couldn't read data - " + se.getMessage(), se);
	} catch (NullPointerException npe) {
		throw new IOException("Couldn't access resultSet", npe);
	}
}
 
Example 8
Source File: CollectSinkOperatorCoordinatorTest.java    From flink with Apache License 2.0
private void assertResponseEquals(
		CollectCoordinationRequest request,
		CollectCoordinationResponse response,
		long expectedLastCheckpointedOffset,
		List<Row> expectedResults) throws Exception {
	Assert.assertEquals(request.getVersion(), response.getVersion());
	Assert.assertEquals(expectedLastCheckpointedOffset, response.getLastCheckpointedOffset());
	List<Row> results = response.getResults(serializer);
	Assert.assertEquals(expectedResults.size(), results.size());
	for (int i = 0; i < results.size(); i++) {
		Row expectedRow = expectedResults.get(i);
		Row actualRow = results.get(i);
		Assert.assertEquals(expectedRow.getArity(), actualRow.getArity());
		for (int j = 0; j < actualRow.getArity(); j++) {
			Assert.assertEquals(expectedRow.getField(j), actualRow.getField(j));
		}
	}
}
 
Example 9
Source File: GlmUtil.java    From Alink with Apache License 2.0
@Override
public Row map(Row row) {
    Row outRow = new Row(row.getArity());
    for (int i = 0; i < numFeature; i++) {
        outRow.setField(i, row.getField(i));
    }
    double label = (Double) row.getField(numFeature);
    double weight = (Double) row.getField(numFeature + 1);
    double offset = (Double) row.getField(numFeature + 2);

    outRow.setField(numFeature, label - offset);
    outRow.setField(numFeature + 1, weight);
    outRow.setField(numFeature + 2, 0.0);

    return outRow;
}
 
Example 10
Source File: CsvTableSink.java    From flink with Apache License 2.0
@Override
public String map(Row row) {
	StringBuilder builder = new StringBuilder();
	Object o;
	for (int i = 0; i < row.getArity(); i++) {
		if (i > 0) {
			builder.append(fieldDelim);
		}
		if ((o = row.getField(i)) != null) {
			builder.append(o);
		}
	}
	return builder.toString();
}
 
Example 11
Source File: CassandraRowOutputFormat.java    From Flink-CEPplus with Apache License 2.0
@Override
protected Object[] extractFields(Row record) {

	Object[] fields = new Object[record.getArity()];
	for (int i = 0; i < fields.length; i++) {
		fields[i] = record.getField(i);
	}
	return fields;
}
 
Example 12
Source File: RowUtil.java    From Alink with Apache License 2.0
/**
 * Removes the field at index {@code idx} from the row and returns a new row.
 */
public static Row remove(Row rec, int idx) {
    int n1 = rec.getArity();
    Row ret = new Row(n1 - 1);
    for (int i = 0; i < n1; ++i) {
        if (i < idx) {
            ret.setField(i, rec.getField(i));
        } else if (i > idx) {
            ret.setField(i - 1, rec.getField(i));
        }
    }
    return ret;
}
 
Example 13
Source File: CsvTableSink.java    From flink with Apache License 2.0
@Override
public String map(Row row) {
	StringBuilder builder = new StringBuilder();
	Object o;
	for (int i = 0; i < row.getArity(); i++) {
		if (builder.length() != 0) {
			builder.append(fieldDelim);
		}
		if ((o = row.getField(i)) != null) {
			builder.append(o);
		}
	}
	return builder.toString();
}
 
Example 14
Source File: SingleRowStreamSqlJob.java    From zeppelin with Apache License 2.0
private List<Object> rowToList(Row row) {
  List<Object> list = new ArrayList<>();
  for (int i = 0; i < row.getArity(); i++) {
    list.add(row.getField(i));
  }
  return list;
}
 
Example 15
Source File: RowComparatorTest.java    From Flink-CEPplus with Apache License 2.0
@Override
protected void deepEquals(String message, Row should, Row is) {
	int arity = should.getArity();
	assertEquals(message, arity, is.getArity());
	for (int i = 0; i < arity; i++) {
		Object copiedValue = should.getField(i);
		Object element = is.getField(i);
		assertEquals(message, element, copiedValue);
	}
}
 
Example 16
Source File: RowUtil.java    From Alink with Apache License 2.0
/**
 * Merges obj and row into a new row, with obj placed at field 0.
 */
public static Row merge(Object obj, Row rec1) {
    int n1 = rec1.getArity();
    Row ret = new Row(n1 + 1);
    ret.setField(0, obj);
    for (int i = 0; i < n1; ++i) {
        ret.setField(i + 1, rec1.getField(i));
    }
    return ret;
}
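
A small usage sketch combining the two RowUtil helpers shown above (remove in Example 12 and merge here); the values are made up for illustration:

Row base = Row.of("a", 1, 2.0);            // arity 3
Row withKey = RowUtil.merge("key", base);  // arity 4, field 0 is "key"
Row restored = RowUtil.remove(withKey, 0); // arity 3 again, original field order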
 
Example 17
Source File: RowOperationMapperTest.java    From bahir-flink with Apache License 2.0
@Test
void testGetField() {
    RowOperationMapper mapper = new RowOperationMapper(KuduTestBase.columns, AbstractSingleOperationMapper.KuduOperation.INSERT);
    Row inputRow = KuduTestBase.booksDataRow().get(0);

    for (int i = 0; i < inputRow.getArity(); i++) {
        Assertions.assertEquals(inputRow.getField(i), mapper.getField(inputRow, i));
    }
}
 
Example 18
Source File: TypeExtractor.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings({ "unchecked", "rawtypes" })
private <X> TypeInformation<X> privateGetForObject(X value) {
	checkNotNull(value);

	// check if type information can be produced using a factory
	final ArrayList<Type> typeHierarchy = new ArrayList<>();
	typeHierarchy.add(value.getClass());
	final TypeInformation<X> typeFromFactory = createTypeInfoFromFactory(value.getClass(), typeHierarchy, null, null);
	if (typeFromFactory != null) {
		return typeFromFactory;
	}

	// check if we can extract the types from tuples, otherwise work with the class
	if (value instanceof Tuple) {
		Tuple t = (Tuple) value;
		int numFields = t.getArity();
		if (numFields != countFieldsInClass(value.getClass())) {
			// Not a tuple since it has more fields; we immediately call analyzePojo here,
			// because there is currently no other type that can handle such a class.
			return analyzePojo((Class<X>) value.getClass(), new ArrayList<Type>(), null, null, null);
		}

		TypeInformation<?>[] infos = new TypeInformation[numFields];
		for (int i = 0; i < numFields; i++) {
			Object field = t.getField(i);

			if (field == null) {
				throw new InvalidTypesException("Automatic type extraction is not possible on candidates with null values. "
						+ "Please specify the types directly.");
			}

			infos[i] = privateGetForObject(field);
		}
		return new TupleTypeInfo(value.getClass(), infos);
	}
	else if (value instanceof Row) {
		Row row = (Row) value;
		int arity = row.getArity();
		for (int i = 0; i < arity; i++) {
			if (row.getField(i) == null) {
				LOG.warn("Cannot extract type of Row field, because of Row field[" + i + "] is null. " +
					"Should define RowTypeInfo explicitly.");
				return privateGetForClass((Class<X>) value.getClass(), new ArrayList<Type>());
			}
		}
		TypeInformation<?>[] typeArray = new TypeInformation<?>[arity];
		for (int i = 0; i < arity; i++) {
			typeArray[i] = TypeExtractor.getForObject(row.getField(i));
		}
		return (TypeInformation<X>) new RowTypeInfo(typeArray);
	}
	else {
		return privateGetForClass((Class<X>) value.getClass(), new ArrayList<Type>());
	}
}
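
The warning branch above is why a RowTypeInfo is usually declared explicitly when Row fields may be null: type extraction cannot infer a field's type from a null value. A minimal sketch (the concrete field types are an assumption for illustration):

// Assumes: import org.apache.flink.api.common.typeinfo.Types;
//          import org.apache.flink.api.java.typeutils.RowTypeInfo;
Row rowWithNull = Row.of("a", null);    // the second field's type cannot be extracted from the value
RowTypeInfo rowType = new RowTypeInfo(Types.STRING, Types.INT); // so the field types are declared explicitly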
 
Example 19
Source File: OneVsRestModelMapper.java    From Alink with Apache License 2.0
@Override
public void loadModel(List<Row> modelRows) {
    Params meta = extractMeta(modelRows);
    int numClasses = meta.get(ModelParamName.NUM_CLASSES);
    String labelsStr = meta.get(ModelParamName.LABELS);
    String labelTypeName = meta.get(ModelParamName.LABEL_TYPE_NAME);
    String binClsClassName = meta.get(ModelParamName.BIN_CLS_CLASS_NAME);
    String[] modelColNames = meta.get(ModelParamName.MODEL_COL_NAMES);
    Integer[] modelColTypesInt = meta.get(ModelParamName.MODEL_COL_TYPES);
    TypeInformation[] modelColTypes = new TypeInformation[modelColTypesInt.length];
    for (int i = 0; i < modelColTypesInt.length; i++) {
        modelColTypes[i] = JdbcTypeConverter.getFlinkType(modelColTypesInt[i]);
    }
    this.predictors = new ArrayList<>(numClasses);
    this.labels = gson.fromJson(labelsStr, ArrayList.class);
    recoverLabelType(labels, labelTypeName);

    try {
        for (int i = 0; i < numClasses; i++) {
            List<Row> rows = new ArrayList<Row>();
            for (Row row : modelRows) {
                if (row.getField(2) == null) {
                    continue;
                }
                long id = (Long) row.getField(2);
                if ((long) (i) == id) {
                    Row subRow = new Row(row.getArity() - 4);
                    for (int j = 0; j < subRow.getArity(); j++) {
                        subRow.setField(j, row.getField(3 + j));
                    }
                    rows.add(subRow);
                }
            }
            TableSchema schema = new TableSchema(modelColNames, modelColTypes);
            RichModelMapper predictor = createModelPredictor(binClsClassName, schema, getDataSchema(), binClsPredParams,
                rows);
            this.predictors.add(predictor);
        }
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
 
Example 20
Source File: FtrlTrainStreamOp.java    From Alink with Apache License 2.0
@Override
public void flatMap(Tuple7<Long, Integer, Integer, Vector, Object, Double, Long> value, Collector<Row> out)
    throws Exception {

    LinearModelData modelData = new LinearModelData();
    modelData.coefVector = (DenseVector)value.f3;
    modelData.hasInterceptItem = this.hasInterceptItem;
    modelData.vectorColName = this.vectorColName;
    modelData.modelName = "Logistic Regression";
    modelData.featureNames = this.featureCols;
    modelData.labelValues = (Object[])value.f4;
    modelData.vectorSize = hasInterceptItem ? modelData.coefVector.size() - 1 : modelData.coefVector.size();
    modelData.linearModelType = LinearModelType.LR;

    RowCollector listCollector = new RowCollector();
    new LinearModelDataConverter().save(modelData, listCollector);
    List<Row> rows = listCollector.getRows();

    for (Row r : rows) {
        int rowSize = r.getArity();
        Row row = new Row(rowSize + 2);
        row.setField(0, iter);
        row.setField(1, (long) rows.size());

        for (int j = 0; j < rowSize; ++j) {
            if (j == 2 && r.getField(j) != null) {
                if (type.equals(StringTypeEnum.BIGINT) || type.equals(StringTypeEnum.LONG)) {
                    row.setField(2 + j, Double.valueOf(r.getField(j).toString()).longValue());
                } else if (type.equals(StringTypeEnum.INT) || type.equals(StringTypeEnum.INTEGER)) {
                    row.setField(2 + j, Double.valueOf(r.getField(j).toString()).intValue());
                } else if (type.equals(StringTypeEnum.DOUBLE)) {
                    row.setField(2 + j, Double.valueOf(r.getField(j).toString()));
                } else if (type.equals(StringTypeEnum.FLOAT)) {
                    row.setField(2 + j, Double.valueOf(r.getField(j).toString()).floatValue());
                } else {
                    row.setField(2 + j, r.getField(j));
                }
            } else {
                row.setField(2 + j, r.getField(j));
            }
        }
        out.collect(row);
    }

    iter++;
}