Java Code Examples for org.apache.flink.types.Row#copy()

The following examples show how to use org.apache.flink.types.Row#copy(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: PythonTableFunctionOperatorTest.java    From flink with Apache License 2.0 6 votes vote down vote up
@Override
public PythonFunctionRunner<Row> createPythonFunctionRunner(
	FnDataReceiver<byte[]> resultReceiver,
	PythonEnvironmentManager pythonEnvironmentManager,
	Map<String, String> jobOptions) {
	// Test-only runner that bypasses the real Python process and passes
	// rows straight through to the result receiver.
	PassThroughPythonTableFunctionRunner<Row> passThroughRunner =
		new PassThroughPythonTableFunctionRunner<Row>(resultReceiver) {
			@Override
			public Row copy(Row element) {
				// Field-wise copy via Flink's Row utility.
				return Row.copy(element);
			}

			@Override
			@SuppressWarnings("unchecked")
			public TypeSerializer<Row> getInputTypeSerializer() {
				// Serializer derived from the UDF's declared input type.
				return PythonTypeUtils.toFlinkTypeSerializer(userDefinedFunctionInputType);
			}
		};
	return passThroughRunner;
}
 
Example 2
Source File: GlmUtil.java    From Alink with Apache License 2.0 5 votes vote down vote up
@Override
public Row map(Row row) throws Exception {
    // The trailing three fields, right after the feature columns, hold
    // the label, the sample weight and the offset, in that order.
    final double label = (Double) row.getField(numFeature);
    final double weight = (Double) row.getField(numFeature + 1);
    final double offset = (Double) row.getField(numFeature + 2);

    // Initial mean from the GLM family, mapped through the link function,
    // then shifted by the offset to obtain the starting linear predictor.
    final double initialMu = familyLink.getFamily().initialize(label, weight);
    final double eta = familyLink.predict(initialMu) - offset;

    // Copy the input so the original row is left untouched, and store
    // eta in place of the label column.
    final Row result = Row.copy(row);
    result.setField(numFeature, eta);
    return result;
}
 
Example 3
Source File: UpsertWriter.java    From flink with Apache License 2.0 4 votes vote down vote up
public void addRecord(Tuple2<Boolean, Row> record) throws SQLException {
	// When object reuse is enabled, Flink may recycle the incoming record,
	// so take a snapshot before buffering. A shallow Row.copy is enough
	// because JDBC field values are immutable objects.
	final Tuple2<Boolean, Row> buffered;
	if (objectReuse) {
		buffered = new Tuple2<>(record.f0, Row.copy(record.f1));
	} else {
		buffered = record;
	}
	// Buffer keyed by primary key; a later record with the same key
	// replaces the earlier one.
	keyToRows.put(getPrimaryKey(buffered.f1), buffered);
}
 
Example 4
Source File: RandomForestModelMapper.java    From Alink with Apache License 2.0 4 votes vote down vote up
@Override
protected Tuple2<Object, String> predictResultDetail(Row row) throws Exception {
	// Predicts by aggregating the votes of all trees in the forest.
	// Returns (predicted label or regression value, JSON detail of the
	// per-label probability distribution, or null for regression).
	Node[] root = treeModel.roots;

	// Work on a copy so the caller's row is not mutated by transRow.
	Row transRow = Row.copy(row);

	transRow = transRow(transRow);

	int len = root.length;

	Object result = null;
	Map<String, Double> detail = null;

	if (len > 0) {
		// Accumulates the (weighted) label distribution across all trees.
		LabelCounter labelCounter = new LabelCounter(
			0, 0, new double[root[0].getCounter().getDistributions().length]);

		Predict(transRow, root[0], labelCounter, 1.0);

		for (int i = 1; i < len; ++i) {
			Predict(transRow, root[i], labelCounter, 1.0);
		}

		labelCounter.normWithWeight();

		if (!Criteria.isRegression(treeModel.meta.get(TreeUtil.TREE_TYPE))) {
			// Classification: pick the label with the highest probability
			// and expose the full distribution as the detail map.
			detail = new HashMap<>();
			double[] probability = labelCounter.getDistributions();
			double max = 0.0;
			int maxIndex = -1;
			for (int i = 0; i < probability.length; ++i) {
				detail.put(String.valueOf(treeModel.labels[i]), probability[i]);
				if (max < probability[i]) {
					max = probability[i];
					maxIndex = i;
				}
			}

			if (maxIndex == -1) {
				// No probability exceeded 0.0 — leave result null instead of
				// indexing labels[-1], which would throw
				// ArrayIndexOutOfBoundsException.
				LOG.warn("Can not find the probability: {}", JsonConverter.toJson(probability));
			} else {
				result = treeModel.labels[maxIndex];
			}
		} else {
			// Regression: the normalized first slot is the prediction.
			result = labelCounter.getDistributions()[0];
		}
	}

	return new Tuple2<>(result, detail == null ? null : JsonConverter.toJson(detail));
}
 
Example 5
Source File: GbdtModelMapper.java    From Alink with Apache License 2.0 4 votes vote down vote up
@Override
protected Tuple2<Object, String> predictResultDetail(Row row) throws Exception {
	// Predicts with a GBDT ensemble: sums the per-tree scores into a single
	// accumulator, then interprets the sum either as a logit (binary
	// classification, algoType == 1) or as a regression value.
	Node[] root = treeModel.roots;

	// Copy so the caller's row is not mutated by transRow below.
	Row transRow = Row.copy(row);

	transRow = transRow(transRow);

	int len = root.length;

	Object result = null;
	Map<String, Double> detail = null;

	if (len > 0) {
		// Accumulator sized to one tree's distribution; Predict adds each
		// tree's contribution into it.
		LabelCounter labelCounter = new LabelCounter(
			0, 0, new double[root[0].getCounter().getDistributions().length]);

		Predict(transRow, root[0], labelCounter, 1.0);

		for (int i = 1; i < len; ++i) {
			// Later trees may be absent (null) — skip them, unlike the
			// first tree which is assumed present.
			if (root[i] != null) {
				Predict(transRow, root[i], labelCounter, 1.0);
			}
		}

		if (algoType == 1) {
			//no need to add "period" for classification

			// Sigmoid of the summed scores gives P(label[1]).
			double p = 1.0 / (1.0 + Math.exp(-labelCounter.getDistributions()[0]));

			if (p >= 0.5) {
				result = treeModel.labels[1];
			} else {
				result = treeModel.labels[0];
			}

			detail = new HashMap<>();
			detail.put(treeModel.labels[0].toString(), 1.0 - p);
			detail.put(treeModel.labels[1].toString(), p);
		} else {
			// Regression: shift the summed score by the stored base value.
			result = labelCounter.getDistributions()[0] + period;
		}
	}

	// NOTE(review): this uses JsonConverter.gson.toJson(...) while sibling
	// mappers call JsonConverter.toJson(...) — confirm whether the two are
	// equivalent or should be unified.
	return new Tuple2<>(result, detail == null ? null : JsonConverter.gson.toJson(detail));
}