org.apache.flink.api.java.tuple.Tuple3 Java Examples
The following examples show how to use
org.apache.flink.api.java.tuple.Tuple3.
The original project and source file are noted above each example.
Example #1
Source File: WindowTranslationTest.java From Flink-CEPplus with Apache License 2.0
@Test
@SuppressWarnings("rawtypes")
public void testFoldWithCustomTrigger() throws Exception {
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    env.setStreamTimeCharacteristic(TimeCharacteristic.IngestionTime);

    DataStream<Tuple2<String, Integer>> source = env.fromElements(Tuple2.of("hello", 1), Tuple2.of("hello", 2));

    DataStream<Tuple3<String, String, Integer>> window1 = source
            .keyBy(0)
            .window(SlidingEventTimeWindows.of(Time.of(1, TimeUnit.SECONDS), Time.of(100, TimeUnit.MILLISECONDS)))
            .trigger(CountTrigger.of(1))
            .fold(new Tuple3<>("", "", 1), new DummyFolder());

    OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>> transform =
            (OneInputTransformation<Tuple2<String, Integer>, Tuple3<String, String, Integer>>) window1.getTransformation();
    OneInputStreamOperator<Tuple2<String, Integer>, Tuple3<String, String, Integer>> operator = transform.getOperator();
    Assert.assertTrue(operator instanceof WindowOperator);
    WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?> winOperator =
            (WindowOperator<String, Tuple2<String, Integer>, ?, ?, ?>) operator;
    Assert.assertTrue(winOperator.getTrigger() instanceof CountTrigger);
    Assert.assertTrue(winOperator.getWindowAssigner() instanceof SlidingEventTimeWindows);
    Assert.assertTrue(winOperator.getStateDescriptor() instanceof FoldingStateDescriptor);

    processElementAndEnsureOutput(winOperator, winOperator.getKeySelector(), BasicTypeInfo.STRING_TYPE_INFO, new Tuple2<>("hello", 1));
}
Example #2
Source File: UnionITCase.java From flink with Apache License 2.0
@Test
public void testUnion5IdenticalDataSets() throws Exception {
    /*
     * Union of 5 same data sets, with multiple unions.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> unionDs = ds.union(CollectionDataSets.get3TupleDataSet(env))
            .union(CollectionDataSets.get3TupleDataSet(env))
            .union(CollectionDataSets.get3TupleDataSet(env))
            .union(CollectionDataSets.get3TupleDataSet(env));

    List<Tuple3<Integer, Long, String>> result = unionDs.collect();

    String expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING
            + FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING;

    compareResultAsTuples(result, expected);
}
Example #3
Source File: UnionITCase.java From flink with Apache License 2.0
@Test
public void testUnion2IdenticalDataSets() throws Exception {
    /*
     * Union of 2 same data sets.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> unionDs = ds.union(CollectionDataSets.get3TupleDataSet(env));

    List<Tuple3<Integer, Long, String>> result = unionDs.collect();

    String expected = FULL_TUPLE_3_STRING + FULL_TUPLE_3_STRING;

    compareResultAsTuples(result, expected);
}
Example #4
Source File: BucketingSinkTestProgram.java From Flink-CEPplus with Apache License 2.0
@Override
public Tuple4<Integer, Long, Integer, String> map(Tuple3<Integer, Long, String> value) throws IOException {
    // update counter
    Integer counterValue = counter.value();
    if (counterValue == null) {
        counterValue = 0;
    }
    counter.update(counterValue + 1);

    // save last value
    Long lastValue = last.value();
    if (lastValue == null) {
        lastValue = initialValue;
    }
    last.update(value.f1);

    return Tuple4.of(value.f0, value.f1 - lastValue, counterValue, value.f2);
}
Example #5
Source File: SemanticPropertiesTranslationTest.java From flink with Apache License 2.0
@Test
public void testUnaryFunctionForwardedInLine2() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, Long, Long>> input = env.fromElements(new Tuple3<Long, Long, Long>(3L, 2L, 1L));
    input.map(new ReadSetMapper<Tuple3<Long, Long, Long>>()).withForwardedFields("0->1; 2")
            .output(new DiscardingOutputFormat<Tuple3<Long, Long, Long>>());
    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 2);
    assertNotNull(fw1);
    assertNotNull(fw2);
    assertTrue(fw1.contains(1));
    assertTrue(fw2.contains(2));
}
Example #6
Source File: FilterITCase.java From flink with Apache License 2.0
@Test
public void testAllRejectingFilter() throws Exception {
    /*
     * Test all-rejecting filter.
     */
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    DataSet<Tuple3<Integer, Long, String>> ds = CollectionDataSets.get3TupleDataSet(env);
    DataSet<Tuple3<Integer, Long, String>> filterDs = ds.filter(new Filter1());

    List<Tuple3<Integer, Long, String>> result = filterDs.collect();

    String expected = "\n";

    compareResultAsTuples(result, expected);
}
Example #7
Source File: AdamicAdar.java From Flink-CEPplus with Apache License 2.0
@Override
public void reduce(Iterable<Tuple3<T, T, FloatValue>> values, Collector<Result<T>> out) throws Exception {
    double sum = 0;
    Tuple3<T, T, FloatValue> edge = null;

    for (Tuple3<T, T, FloatValue> next : values) {
        edge = next;
        sum += next.f2.getValue();
    }

    if (sum >= minimumScore) {
        output.setVertexId0(edge.f0);
        output.setVertexId1(edge.f1);
        output.setAdamicAdarScore((float) sum);
        out.collect(output);
    }
}
Example #8
Source File: PrefixSpanBatchOp.java From Alink with Apache License 2.0
/**
 * Encode the sequence patterns.
 */
private static Tuple3<String, Long, Long> encodeSequence(int[] sequence, String[] indexToString) {
    StringBuilder sbd = new StringBuilder();
    int itemSetSize = 0;
    long chainLength = 1L;
    long itemCount = 0L;
    for (int i = 1; i < sequence.length - 1; i++) {
        if (sequence[i] == 0) {
            sbd.append(ELEMENT_SEPARATOR);
            chainLength++;
            itemSetSize = 0;
        } else {
            if (itemSetSize > 0) {
                sbd.append(ITEM_SEPARATOR);
            }
            sbd.append(indexToString[sequence[i]]);
            itemSetSize++;
            itemCount++;
        }
    }
    return Tuple3.of(sbd.toString(), itemCount, chainLength);
}
Example #9
Source File: ReduceWithCombinerITCase.java From Flink-CEPplus with Apache License 2.0
@Test
public void testForkingReduceOnKeyedDataset() throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.setParallelism(4);

    // creates the input data and distributes them evenly among the available downstream tasks
    DataSet<Tuple3<String, Integer, Boolean>> input = createKeyedInput(env);

    UnsortedGrouping<Tuple3<String, Integer, Boolean>> counts = input.groupBy(0);

    DataSet<Tuple3<String, Integer, Boolean>> r1 = counts.reduceGroup(new KeyedCombReducer());
    DataSet<Tuple3<String, Integer, Boolean>> r2 = counts.reduceGroup(new KeyedGroupCombReducer());

    List<Tuple3<String, Integer, Boolean>> actual = r1.union(r2).collect();
    String expected = "k1,6,true\n" + "k2,4,true\n" + "k1,6,true\n" + "k2,4,true\n";
    compareResultAsTuples(actual, expected);
}
Example #10
Source File: EdgeDegreesPairTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testWithSimpleGraph() throws Exception {
    String expectedResult =
        "(0,1,((null),(2,2,0),(3,0,3)))\n" +
        "(0,2,((null),(2,2,0),(3,2,1)))\n" +
        "(2,1,((null),(3,2,1),(3,0,3)))\n" +
        "(2,3,((null),(3,2,1),(4,2,2)))\n" +
        "(3,1,((null),(4,2,2),(3,0,3)))\n" +
        "(3,4,((null),(4,2,2),(1,0,1)))\n" +
        "(5,3,((null),(1,1,0),(4,2,2)))";

    DataSet<Edge<IntValue, Tuple3<NullValue, Degrees, Degrees>>> degreesPair = directedSimpleGraph
        .run(new EdgeDegreesPair<>());

    TestBaseUtils.compareResultAsText(degreesPair.collect(), expectedResult);
}
Example #11
Source File: GroupReduceOperator.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked") private static <IN, OUT, K1, K2> PlanUnwrappingSortedReduceGroupOperator<IN, OUT, K1, K2> translateSelectorFunctionSortedReducer( SelectorFunctionKeys<IN, ?> rawGroupingKey, SelectorFunctionKeys<IN, ?> rawSortingKey, Ordering groupOrdering, GroupReduceFunction<IN, OUT> function, TypeInformation<OUT> outputType, String name, Operator<IN> input, boolean combinable) { final SelectorFunctionKeys<IN, K1> groupingKey = (SelectorFunctionKeys<IN, K1>) rawGroupingKey; final SelectorFunctionKeys<IN, K2> sortingKey = (SelectorFunctionKeys<IN, K2>) rawSortingKey; TypeInformation<Tuple3<K1, K2, IN>> typeInfoWithKey = KeyFunctions.createTypeWithKey(groupingKey, sortingKey); Operator<Tuple3<K1, K2, IN>> inputWithKey = KeyFunctions.appendKeyExtractor(input, groupingKey, sortingKey); PlanUnwrappingSortedReduceGroupOperator<IN, OUT, K1, K2> reducer = new PlanUnwrappingSortedReduceGroupOperator<>( function, groupingKey, sortingKey, name, outputType, typeInfoWithKey, combinable); reducer.setInput(inputWithKey); reducer.setGroupOrder(groupOrdering); return reducer; }
Example #12
Source File: SelectorFunctionKeysTest.java From Flink-CEPplus with Apache License 2.0
@Test
public void testAreCompatible2() throws Keys.IncompatibleKeysException {
    TypeInformation<PojoWithMultiplePojos> t1 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
    TypeInformation<Tuple3<Long, Pojo1, Integer>> t2 = new TupleTypeInfo<>(
        BasicTypeInfo.LONG_TYPE_INFO,
        TypeExtractor.getForClass(Pojo1.class),
        BasicTypeInfo.INT_TYPE_INFO);
    TypeInformation<Tuple2<Integer, String>> kt = new TupleTypeInfo<>(
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    Keys<PojoWithMultiplePojos> k1 = new Keys.SelectorFunctionKeys<>(new KeySelector3(), t1, kt);
    Keys<Tuple3<Long, Pojo1, Integer>> k2 = new Keys.SelectorFunctionKeys<>(new KeySelector4(), t2, kt);

    Assert.assertTrue(k1.areCompatible(k2));
    Assert.assertTrue(k2.areCompatible(k1));
}
Example #13
Source File: ScatterGatherIteration.java From flink with Apache License 2.0
@Override
public void coGroup(
        Iterable<Edge<K, EV>> edges,
        Iterable<Vertex<K, Tuple3<VV, LongValue, LongValue>>> state,
        Collector<Tuple2<K, Message>> out) throws Exception {
    final Iterator<Vertex<K, Tuple3<VV, LongValue, LongValue>>> stateIter = state.iterator();

    if (stateIter.hasNext()) {
        Vertex<K, Tuple3<VV, LongValue, LongValue>> vertexWithDegrees = stateIter.next();

        nextVertex.f0 = vertexWithDegrees.f0;
        nextVertex.f1 = vertexWithDegrees.f1.f0;

        scatterFunction.setInDegree(vertexWithDegrees.f1.f1.getValue());
        scatterFunction.setOutDegree(vertexWithDegrees.f1.f2.getValue());

        scatterFunction.set(edges.iterator(), out, vertexWithDegrees.getId());
        scatterFunction.sendMessages(nextVertex);
    }
}
Example #14
Source File: DocCountVectorizerTrainBatchOp.java From Alink with Apache License 2.0
@Override
public void mapPartition(Iterable<Tuple2<Long, Row>> iterable, Collector<DocCountVectorizerModelData> collector) throws Exception {
    List<String> data = new ArrayList<>();
    Tuple3<String, Double, Integer> feature = Tuple3.of(null, null, null);
    for (Tuple2<Long, Row> tuple : iterable) {
        Row row = tuple.f1;
        feature.f0 = row.getField(0).toString();
        feature.f1 = ((Number) row.getField(2)).doubleValue();
        feature.f2 = tuple.f0.intValue();
        data.add(JsonConverter.toJson(feature));
    }
    DocCountVectorizerModelData modelData = new DocCountVectorizerModelData();
    modelData.featureType = featureType;
    modelData.minTF = minTF;
    modelData.list = data;
    collector.collect(modelData);
    // new DocCountVectorizerModelDataConverter().save(modelData, collector);
}
Example #15
Source File: WindowOperatorMigrationTest.java From flink with Apache License 2.0
@Override
public int compare(Object o1, Object o2) {
    if (o1 instanceof Watermark || o2 instanceof Watermark) {
        return 0;
    } else {
        StreamRecord<Tuple3<String, Long, Long>> sr0 = (StreamRecord<Tuple3<String, Long, Long>>) o1;
        StreamRecord<Tuple3<String, Long, Long>> sr1 = (StreamRecord<Tuple3<String, Long, Long>>) o2;
        if (sr0.getTimestamp() != sr1.getTimestamp()) {
            return (int) (sr0.getTimestamp() - sr1.getTimestamp());
        }
        int comparison = sr0.getValue().f0.compareTo(sr1.getValue().f0);
        if (comparison != 0) {
            return comparison;
        } else {
            comparison = (int) (sr0.getValue().f1 - sr1.getValue().f1);
            if (comparison != 0) {
                return comparison;
            }
            // tie-break on the third tuple field
            return (int) (sr0.getValue().f2 - sr1.getValue().f2);
        }
    }
}
Example #16
Source File: GraphLoader.java From OSTMap with Apache License 2.0
private DataSet<Tuple3<String, String, UserEdgeValues>> getUserEdges(DataSet<JSONObject> jsonData) {
    DataSet<Tuple3<String, String, UserEdgeValues>> userEdges =
        jsonData.flatMap(new FlatMapFunction<JSONObject, Tuple3<String, String, UserEdgeValues>>() {
            @Override
            public void flatMap(JSONObject jsonObject, Collector<Tuple3<String, String, UserEdgeValues>> out) throws Exception {
                // count initialized to 1
                int count = 1;

                // from the current node
                JSONObject user = jsonObject.getJSONObject("user");
                String from = user.getString("id_str");

                // to other nodes
                JSONObject entities = jsonObject.getJSONObject("entities");
                JSONArray userMentions = entities.getJSONArray("user_mentions");
                for (int i = 0; i < userMentions.length(); i++) {
                    JSONObject current = userMentions.getJSONObject(i);
                    String to = current.getString("id_str");
                    out.collect(new Tuple3<String, String, UserEdgeValues>(from, to, new UserEdgeValues(count)));
                }
            }
        });
    return userEdges;
}
Example #17
Source File: SelectorFunctionKeysTest.java From flink with Apache License 2.0
@Test
public void testAreCompatible2() throws Keys.IncompatibleKeysException {
    TypeInformation<PojoWithMultiplePojos> t1 = TypeExtractor.getForClass(PojoWithMultiplePojos.class);
    TypeInformation<Tuple3<Long, Pojo1, Integer>> t2 = new TupleTypeInfo<>(
        BasicTypeInfo.LONG_TYPE_INFO,
        TypeExtractor.getForClass(Pojo1.class),
        BasicTypeInfo.INT_TYPE_INFO);
    TypeInformation<Tuple2<Integer, String>> kt = new TupleTypeInfo<>(
        BasicTypeInfo.INT_TYPE_INFO,
        BasicTypeInfo.STRING_TYPE_INFO);

    Keys<PojoWithMultiplePojos> k1 = new Keys.SelectorFunctionKeys<>(new KeySelector3(), t1, kt);
    Keys<Tuple3<Long, Pojo1, Integer>> k2 = new Keys.SelectorFunctionKeys<>(new KeySelector4(), t2, kt);

    Assert.assertTrue(k1.areCompatible(k2));
    Assert.assertTrue(k2.areCompatible(k1));
}
Example #18
Source File: FormatTransMapper.java From Alink with Apache License 2.0
@Override
public void open() {
    Tuple2<FormatReader, String[]> t2From = initFormatReader(super.getDataSchema(), params);
    this.formatReader = t2From.f0;
    String[] fromColNames = t2From.f1;

    Tuple3<FormatWriter, String[], TypeInformation[]> t3To = initFormatWriter(params, fromColNames);
    formatWriter = t3To.f0;
}
Example #19
Source File: TypeExtractorTest.java From Flink-CEPplus with Apache License 2.0
@SuppressWarnings({ "rawtypes", "unchecked" }) @Test public void testSqlTimeTypes() { MapFunction<?, ?> function = new MapFunction<Tuple3<Date, Time, Timestamp>, Tuple3<Date, Time, Timestamp>>() { @Override public Tuple3<Date, Time, Timestamp> map(Tuple3<Date, Time, Timestamp> value) throws Exception { return null; } }; TypeInformation<?> ti = TypeExtractor.getMapReturnTypes( function, (TypeInformation) TypeInformation.of(new TypeHint<Tuple3<Date, Time, Timestamp>>() { })); Assert.assertTrue(ti.isTupleType()); TupleTypeInfo<?> tti = (TupleTypeInfo<?>) ti; Assert.assertEquals(SqlTimeTypeInfo.DATE, tti.getTypeAt(0)); Assert.assertEquals(SqlTimeTypeInfo.TIME, tti.getTypeAt(1)); Assert.assertEquals(SqlTimeTypeInfo.TIMESTAMP, tti.getTypeAt(2)); // use getForClass() Assert.assertEquals(tti.getTypeAt(0), TypeExtractor.getForClass(Date.class)); Assert.assertEquals(tti.getTypeAt(1), TypeExtractor.getForClass(Time.class)); Assert.assertEquals(tti.getTypeAt(2), TypeExtractor.getForClass(Timestamp.class)); // use getForObject() Assert.assertEquals(SqlTimeTypeInfo.DATE, TypeExtractor.getForObject(Date.valueOf("1998-12-12"))); Assert.assertEquals(SqlTimeTypeInfo.TIME, TypeExtractor.getForObject(Time.valueOf("12:37:45"))); Assert.assertEquals(SqlTimeTypeInfo.TIMESTAMP, TypeExtractor.getForObject(Timestamp.valueOf("1998-12-12 12:37:45"))); }
Example #20
Source File: CoGroupCustomPartitioningTest.java From flink with Apache License 2.0
@Test
public void testCoGroupWithTuples() {
    try {
        final Partitioner<Long> partitioner = new TestPartitionerLong();

        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        DataSet<Tuple2<Long, Long>> input1 = env.fromElements(new Tuple2<Long, Long>(0L, 0L));
        DataSet<Tuple3<Long, Long, Long>> input2 = env.fromElements(new Tuple3<Long, Long, Long>(0L, 0L, 0L));

        input1
            .coGroup(input2)
            .where(1).equalTo(0)
            .withPartitioner(partitioner)
            .with(new DummyCoGroupFunction<Tuple2<Long, Long>, Tuple3<Long, Long, Long>>())
            .output(new DiscardingOutputFormat<Tuple2<Tuple2<Long, Long>, Tuple3<Long, Long, Long>>>());

        Plan p = env.createProgramPlan();
        OptimizedPlan op = compileNoStats(p);

        SinkPlanNode sink = op.getDataSinks().iterator().next();
        DualInputPlanNode join = (DualInputPlanNode) sink.getInput().getSource();

        assertEquals(ShipStrategyType.PARTITION_CUSTOM, join.getInput1().getShipStrategy());
        assertEquals(ShipStrategyType.PARTITION_CUSTOM, join.getInput2().getShipStrategy());
        assertEquals(partitioner, join.getInput1().getPartitioner());
        assertEquals(partitioner, join.getInput2().getPartitioner());
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
Example #21
Source File: KafkaShuffleTestBase.java From flink with Apache License 2.0
@Override
public Tuple3<Integer, Long, Integer> map(Tuple3<Integer, Long, Integer> element) throws Exception {
    counter++;
    if (counter > totalCount) {
        throw new Exception("Error: number of elements more than expected");
    }
    return element;
}
Example #22
Source File: TriangleListing.java From flink with Apache License 2.0
@Override
public Result<T> join(Tuple3<T, T, T> triplet, Tuple2<T, T> edge) throws Exception {
    output.setVertexId0(triplet.f0);
    output.setVertexId1(triplet.f1);
    output.setVertexId2(triplet.f2);
    return output;
}
Example #23
Source File: SemanticPropertiesPrecedenceTest.java From flink with Apache License 2.0
@Test
public void testFunctionApiPrecedence() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    @SuppressWarnings("unchecked")
    DataSet<Tuple3<Long, String, Integer>> input = env.fromElements(Tuple3.of(3L, "test", 42));

    input
        .map(new WildcardForwardedMapper<Tuple3<Long, String, Integer>>())
        .withForwardedFields("f0")
        .output(new DiscardingOutputFormat<Tuple3<Long, String, Integer>>());

    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    MapOperatorBase<?, ?, ?> mapper = (MapOperatorBase<?, ?, ?>) sink.getInput();

    SingleInputSemanticProperties semantics = mapper.getSemanticProperties();

    FieldSet fw1 = semantics.getForwardingTargetFields(0, 0);
    FieldSet fw2 = semantics.getForwardingTargetFields(0, 1);
    FieldSet fw3 = semantics.getForwardingTargetFields(0, 2);

    assertNotNull(fw1);
    assertNotNull(fw2);
    assertNotNull(fw3);
    assertTrue(fw1.contains(0));
    assertFalse(fw2.contains(1));
    assertFalse(fw3.contains(2));
}
Example #24
Source File: SampleITCase.java From flink with Apache License 2.0
private void verifySamplerWithFixedSize(boolean withReplacement, int numSamples, long seed) throws Exception {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    FlatMapOperator<Tuple3<Integer, Long, String>, String> ds = getSourceDataSet(env);
    DataSet<String> sampled = DataSetUtils.sampleWithSize(ds, withReplacement, numSamples, seed);
    List<String> result = sampled.collect();
    assertEquals(numSamples, result.size());
    containsResultAsText(result, getSourceStrings());
}
Example #25
Source File: CassandraConnectorITCase.java From flink with Apache License 2.0
@Test
public void testCassandraTupleAtLeastOnceSink() throws Exception {
    CassandraTupleSink<Tuple3<String, Integer, Integer>> sink =
        new CassandraTupleSink<>(injectTableName(INSERT_DATA_QUERY), builder);
    try {
        sink.open(new Configuration());
        for (Tuple3<String, Integer, Integer> value : collection) {
            sink.send(value);
        }
    } finally {
        sink.close();
    }

    ResultSet rs = session.execute(injectTableName(SELECT_DATA_QUERY));
    Assert.assertEquals(20, rs.all().size());
}
Example #26
Source File: PipelineCandidatesGrid.java From Alink with Apache License 2.0
@Override
public Tuple2<Pipeline, List<Tuple3<Integer, ParamInfo, Object>>> get(
        int index, List<Double> experienceScores) throws CloneNotSupportedException {
    ArrayList<Tuple3<Integer, ParamInfo, Object>> paramList = new ArrayList<>();
    for (int i = this.dim - 1; i >= 0; i--) {
        int k = index / this.counts[i];
        index = index % this.counts[i];
        Tuple3<Integer, ParamInfo, Object[]> t3 = this.items.get(i);
        paramList.add(new Tuple3<>(t3.f0, t3.f1, t3.f2[k]));
    }
    Pipeline pipelineClone = this.pipeline.clone();
    updatePipelineParams(pipelineClone, paramList);
    return Tuple2.of(pipelineClone, paramList);
}
Example #27
Source File: UnaryObjFuncTest.java From Alink with Apache License 2.0
@Test
public void calcConstraintSearchValues() throws Exception {
    UnaryLossObjFunc objfunc = new UnaryLossObjFunc(
        new LogLossFunc(),
        new Params().set(HasL1.L_1, 0.1).set(HasL2.L_2, 0.1));

    List<Tuple3<Double, Double, Vector>> labelVectors = new ArrayList<>();
    DenseVector coef = new DenseVector(FEATURE_DIM);
    DenseVector dirVec = new DenseVector(FEATURE_DIM);
    for (int i = 0; i < FEATURE_DIM; ++i) {
        coef.set(i, 1.0 + 0.5 * i);
        dirVec.set(i, (i + 0.1));
    }
    for (int i = 0; i < 100; ++i) {
        DenseVector tmp = new DenseVector(FEATURE_DIM);
        for (int j = 0; j < FEATURE_DIM; ++j) {
            tmp.set(j, j + 0.1 * i);
        }
        labelVectors.add(Tuple3.of(1.0, 1.0 - i % 2, tmp));
    }

    double[] trueConstValues = new double[] {
        34.65735902799723, 34.65735902799723, 37.163138901642334, 39.847755908478675,
        40.14874960561338, 40.48932608213041, 40.87755215903219, 41.323690786508514,
        41.84091851878743, 42.44629788093937};

    double[] constraintLosses = objfunc.constraintCalcSearchValues(labelVectors, coef, dirVec, 0.5, 10);

    for (int i = 0; i < FEATURE_DIM; ++i) {
        assertEquals(constraintLosses[i], trueConstValues[i], EPS);
    }
}
Example #28
Source File: SemanticPropertiesProjectionTest.java From flink with Apache License 2.0
@Test
public void testProjectionSemProps2() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<Tuple4<Integer, Tuple3<String, Integer, Long>, Tuple2<Long, Long>, String>> tupleDs =
        env.fromCollection(emptyNestedTupleData, nestedTupleTypeInfo);

    tupleDs.project(2, 3, 1, 2).output(new DiscardingOutputFormat<Tuple>());

    Plan plan = env.createProgramPlan();

    GenericDataSinkBase<?> sink = plan.getDataSinks().iterator().next();
    PlanProjectOperator<?, ?> projectOperator = ((PlanProjectOperator<?, ?>) sink.getInput());

    SingleInputSemanticProperties props = projectOperator.getSemanticProperties();

    assertNotNull(props.getForwardingTargetFields(0, 0));
    assertEquals(1, props.getForwardingTargetFields(0, 1).size());
    assertEquals(1, props.getForwardingTargetFields(0, 2).size());
    assertEquals(1, props.getForwardingTargetFields(0, 3).size());
    assertEquals(2, props.getForwardingTargetFields(0, 4).size());
    assertEquals(2, props.getForwardingTargetFields(0, 5).size());
    assertEquals(1, props.getForwardingTargetFields(0, 6).size());
    assertEquals(0, props.getForwardingTargetFields(0, 0).size());
    assertTrue(props.getForwardingTargetFields(0, 4).contains(0));
    assertTrue(props.getForwardingTargetFields(0, 5).contains(1));
    assertTrue(props.getForwardingTargetFields(0, 6).contains(2));
    assertTrue(props.getForwardingTargetFields(0, 1).contains(3));
    assertTrue(props.getForwardingTargetFields(0, 2).contains(4));
    assertTrue(props.getForwardingTargetFields(0, 3).contains(5));
    assertTrue(props.getForwardingTargetFields(0, 4).contains(6));
    assertTrue(props.getForwardingTargetFields(0, 5).contains(7));
}
Example #29
Source File: JavaStreamTestData.java From Flink-CEPplus with Apache License 2.0
public static DataStream<Tuple3<Integer, Long, String>> getSmall3TupleDataSet(StreamExecutionEnvironment env) {
    List<Tuple3<Integer, Long, String>> data = new ArrayList<>();
    data.add(new Tuple3<>(1, 1L, "Hi"));
    data.add(new Tuple3<>(2, 2L, "Hello"));
    data.add(new Tuple3<>(3, 2L, "Hello world"));

    Collections.shuffle(data);

    return env.fromCollection(data);
}
Example #30
Source File: VectorStandardScalerTrainBatchOp.java From Alink with Apache License 2.0
@Override
public void flatMap(BaseVectorSummary srt, Collector<Row> collector) throws Exception {
    if (null != srt) {
        VectorStandardScalerModelDataConverter converter = new VectorStandardScalerModelDataConverter();
        converter.vectorColName = selectedColName;
        converter.save(Tuple3.of(withMean, withStd, srt), collector);
    }
}