org.apache.flink.api.common.operators.base.BulkIterationBase Java Examples

The following examples show how to use org.apache.flink.api.common.operators.base.BulkIterationBase. They are taken from open source projects; the source file, project, and license are noted above each example.
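For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) of how a DataSet program that calls iterate() and closeWith() is translated into a Plan whose data sink is fed by a BulkIterationBase; this is the object the examples below create, visit, or execute. The class name BulkIterationSketch and the trivial increment step function are illustrative only.

import org.apache.flink.api.common.Plan;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.operators.base.BulkIterationBase;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.io.DiscardingOutputFormat;
import org.apache.flink.api.java.operators.IterativeDataSet;

public class BulkIterationSketch {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// Open a bulk iteration with at most 10 supersteps; the step function adds 1 per superstep.
		DataSet<Long> input = env.fromElements(0L);
		IterativeDataSet<Long> iteration = input.iterate(10);
		DataSet<Long> body = iteration.map(new MapFunction<Long, Long>() {
			@Override
			public Long map(Long value) {
				return value + 1;
			}
		});
		DataSet<Long> result = iteration.closeWith(body);
		result.output(new DiscardingOutputFormat<Long>());

		// Translating the program yields a Plan whose data sink is fed by a BulkIterationBase.
		Plan plan = env.createProgramPlan("bulk iteration sketch");
		BulkIterationBase<?> op = (BulkIterationBase<?>) plan.getDataSinks().iterator().next().getInput();
		System.out.println(op.getName() + ", max iterations: " + op.getMaximumNumberOfIterations());
	}
}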
Example #1
Source File: OperatorTranslation.java    From flink with Apache License 2.0
private <T> BulkIterationBase<T> translateBulkIteration(BulkIterationResultSet<?> untypedIterationEnd) {
	@SuppressWarnings("unchecked")
	BulkIterationResultSet<T> iterationEnd = (BulkIterationResultSet<T>) untypedIterationEnd;
	IterativeDataSet<T> iterationHead = iterationEnd.getIterationHead();
	BulkIterationBase<T> iterationOperator =
			new BulkIterationBase<>(new UnaryOperatorInformation<>(iterationEnd.getType(), iterationEnd.getType()), "Bulk Iteration");

	if (iterationHead.getParallelism() > 0) {
		iterationOperator.setParallelism(iterationHead.getParallelism());
	}

	translated.put(iterationHead, iterationOperator.getPartialSolution());

	Operator<T> translatedBody = translate(iterationEnd.getNextPartialSolution());
	iterationOperator.setNextPartialSolution(translatedBody);
	iterationOperator.setMaximumNumberOfIterations(iterationHead.getMaxIterations());
	iterationOperator.setInput(translate(iterationHead.getInput()));

	iterationOperator.getAggregators().addAll(iterationHead.getAggregators());

	if (iterationEnd.getTerminationCriterion() != null) {
		iterationOperator.setTerminationCriterion(translate(iterationEnd.getTerminationCriterion()));
	}

	return iterationOperator;
}
 
Example #2
Source File: OperatorResolver.java    From flink with Apache License 2.0
@Override
public boolean preVisit(Operator<?> visitable) {
	if (this.seen.add(visitable)) {
		// add to the map
		final String name = visitable.getName();
		List<Operator<?>> list = this.map.get(name);
		if (list == null) {
			list = new ArrayList<Operator<?>>(2);
			this.map.put(name, list);
		}
		list.add(visitable);
		
		// recurse into bulk iterations
		if (visitable instanceof BulkIterationBase) {
			((BulkIterationBase) visitable).getNextPartialSolution().accept(this);
		} else if (visitable instanceof DeltaIterationBase) {
			((DeltaIterationBase) visitable).getSolutionSetDelta().accept(this);
			((DeltaIterationBase) visitable).getNextWorkset().accept(this);
		}
		
		return true;
	} else {
		return false;
	}
}
 
Example #3
Source File: OperatorTranslation.java    From Flink-CEPplus with Apache License 2.0
private <T> BulkIterationBase<T> translateBulkIteration(BulkIterationResultSet<?> untypedIterationEnd) {
	@SuppressWarnings("unchecked")
	BulkIterationResultSet<T> iterationEnd = (BulkIterationResultSet<T>) untypedIterationEnd;
	IterativeDataSet<T> iterationHead = iterationEnd.getIterationHead();
	BulkIterationBase<T> iterationOperator =
			new BulkIterationBase<>(new UnaryOperatorInformation<>(iterationEnd.getType(), iterationEnd.getType()), "Bulk Iteration");

	if (iterationHead.getParallelism() > 0) {
		iterationOperator.setParallelism(iterationHead.getParallelism());
	}

	translated.put(iterationHead, iterationOperator.getPartialSolution());

	Operator<T> translatedBody = translate(iterationEnd.getNextPartialSolution());
	iterationOperator.setNextPartialSolution(translatedBody);
	iterationOperator.setMaximumNumberOfIterations(iterationHead.getMaxIterations());
	iterationOperator.setInput(translate(iterationHead.getInput()));

	iterationOperator.getAggregators().addAll(iterationHead.getAggregators());

	if (iterationEnd.getTerminationCriterion() != null) {
		iterationOperator.setTerminationCriterion(translate(iterationEnd.getTerminationCriterion()));
	}

	return iterationOperator;
}
 
Example #4
Source File: OperatorResolver.java    From Flink-CEPplus with Apache License 2.0
@Override
public boolean preVisit(Operator<?> visitable) {
	if (this.seen.add(visitable)) {
		// add to the map
		final String name = visitable.getName();
		List<Operator<?>> list = this.map.get(name);
		if (list == null) {
			list = new ArrayList<Operator<?>>(2);
			this.map.put(name, list);
		}
		list.add(visitable);
		
		// recurse into bulk iterations
		if (visitable instanceof BulkIterationBase) {
			((BulkIterationBase) visitable).getNextPartialSolution().accept(this);
		} else if (visitable instanceof DeltaIterationBase) {
			((DeltaIterationBase) visitable).getSolutionSetDelta().accept(this);
			((DeltaIterationBase) visitable).getNextWorkset().accept(this);
		}
		
		return true;
	} else {
		return false;
	}
}
 
Example #5
Source File: BulkIterationNode.java    From Flink-CEPplus with Apache License 2.0
/**
 * Creates a new node for the bulk iteration.
 * 
 * @param iteration The bulk iteration the node represents.
 */
public BulkIterationNode(BulkIterationBase<?> iteration) {
	super(iteration);
	
	if (iteration.getMaximumNumberOfIterations() <= 0) {
		throw new CompilerException("BulkIteration must have a maximum number of iterations specified.");
	}
	
	int numIters = iteration.getMaximumNumberOfIterations();
	
	this.costWeight = (numIters > 0 && numIters < OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT) ?
		numIters : OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT; 
}
 
Example #6
Source File: BulkIterationNode.java    From flink with Apache License 2.0
/**
 * Creates a new node for the bulk iteration.
 * 
 * @param iteration The bulk iteration the node represents.
 */
public BulkIterationNode(BulkIterationBase<?> iteration) {
	super(iteration);
	
	if (iteration.getMaximumNumberOfIterations() <= 0) {
		throw new CompilerException("BulkIteration must have a maximum number of iterations specified.");
	}
	
	int numIters = iteration.getMaximumNumberOfIterations();
	
	this.costWeight = (numIters > 0 && numIters < OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT) ?
		numIters : OptimizerNode.MAX_DYNAMIC_PATH_COST_WEIGHT; 
}
 
Example #7
Source File: CompilerTestBase.java    From Flink-CEPplus with Apache License 2.0
@Override
public boolean preVisit(Operator<?> visitable) {

	if(visitable instanceof GenericDataSourceBase) {
		sources.add((GenericDataSourceBase<?, ?>) visitable);
	}
	else if(visitable instanceof BulkIterationBase) {
		((BulkIterationBase<?>) visitable).getNextPartialSolution().accept(this);
	}

	return true;
}
 
Example #8
Source File: CollectionExecutor.java    From flink with Apache License 2.0
private List<?> execute(Operator<?> operator, int superStep) throws Exception {
	List<?> result = this.intermediateResults.get(operator);
	
	// if it has already been computed, use the cached variant
	if (result != null) {
		return result;
	}
	
	if (operator instanceof BulkIterationBase) {
		result = executeBulkIteration((BulkIterationBase<?>) operator);
	}
	else if (operator instanceof DeltaIterationBase) {
		result = executeDeltaIteration((DeltaIterationBase<?, ?>) operator);
	}
	else if (operator instanceof SingleInputOperator) {
		result = executeUnaryOperator((SingleInputOperator<?, ?, ?>) operator, superStep);
	}
	else if (operator instanceof DualInputOperator) {
		result = executeBinaryOperator((DualInputOperator<?, ?, ?, ?>) operator, superStep);
	}
	else if (operator instanceof GenericDataSourceBase) {
		result = executeDataSource((GenericDataSourceBase<?, ?>) operator, superStep);
	}
	else if (operator instanceof GenericDataSinkBase) {
		executeDataSink((GenericDataSinkBase<?>) operator, superStep);
		result = Collections.emptyList();
	}
	else {
		throw new RuntimeException("Cannot execute operator " + operator.getClass().getName());
	}
	
	this.intermediateResults.put(operator, result);
	
	return result;
}
 
Example #9
Source File: CompilerTestBase.java    From flink with Apache License 2.0
@Override
public boolean preVisit(Operator<?> visitable) {

	if(visitable instanceof GenericDataSourceBase) {
		sources.add((GenericDataSourceBase<?, ?>) visitable);
	}
	else if(visitable instanceof BulkIterationBase) {
		((BulkIterationBase<?>) visitable).getNextPartialSolution().accept(this);
	}

	return true;
}
 
Example #10
Source File: CollectionExecutor.java    From Flink-CEPplus with Apache License 2.0
private List<?> execute(Operator<?> operator, int superStep) throws Exception {
	List<?> result = this.intermediateResults.get(operator);
	
	// if it has already been computed, use the cached variant
	if (result != null) {
		return result;
	}
	
	if (operator instanceof BulkIterationBase) {
		result = executeBulkIteration((BulkIterationBase<?>) operator);
	}
	else if (operator instanceof DeltaIterationBase) {
		result = executeDeltaIteration((DeltaIterationBase<?, ?>) operator);
	}
	else if (operator instanceof SingleInputOperator) {
		result = executeUnaryOperator((SingleInputOperator<?, ?, ?>) operator, superStep);
	}
	else if (operator instanceof DualInputOperator) {
		result = executeBinaryOperator((DualInputOperator<?, ?, ?, ?>) operator, superStep);
	}
	else if (operator instanceof GenericDataSourceBase) {
		result = executeDataSource((GenericDataSourceBase<?, ?>) operator, superStep);
	}
	else if (operator instanceof GenericDataSinkBase) {
		executeDataSink((GenericDataSinkBase<?>) operator, superStep);
		result = Collections.emptyList();
	}
	else {
		throw new RuntimeException("Cannot execute operator " + operator.getClass().getName());
	}
	
	this.intermediateResults.put(operator, result);
	
	return result;
}
 
Example #11
Source File: BulkIterationNode.java    From flink with Apache License 2.0
public BulkIterationBase<?> getIterationContract() {
	return (BulkIterationBase<?>) getOperator();
}
 
Example #12
Source File: ChainedTerminationCriterionDriver.java    From flink with Apache License 2.0
@Override
public void setup(AbstractInvokable parent) {
	agg = ((IterationRuntimeContext) getUdfRuntimeContext()).getIterationAggregator(BulkIterationBase.TERMINATION_CRITERION_AGGREGATOR_NAME);
}
 
Example #13
Source File: CollectionExecutor.java    From flink with Apache License 2.0
@SuppressWarnings("unchecked")
private <T> List<T> executeBulkIteration(BulkIterationBase<?> iteration) throws Exception {
	Operator<?> inputOp = iteration.getInput();
	if (inputOp == null) {
		throw new InvalidProgramException("The iteration " + iteration.getName() + " has no input (initial partial solution).");
	}
	if (iteration.getNextPartialSolution() == null) {
		throw new InvalidProgramException("The iteration " + iteration.getName() + " has no next partial solution defined (is not closed).");
	}
	
	List<T> inputData = (List<T>) execute(inputOp);
	
	// get the operators that are iterative
	Set<Operator<?>> dynamics = new LinkedHashSet<Operator<?>>();
	DynamicPathCollector dynCollector = new DynamicPathCollector(dynamics);
	iteration.getNextPartialSolution().accept(dynCollector);
	if (iteration.getTerminationCriterion() != null) {
		iteration.getTerminationCriterion().accept(dynCollector);
	}
	
	// register the aggregators
	for (AggregatorWithName<?> a : iteration.getAggregators().getAllRegisteredAggregators()) {
		aggregators.put(a.getName(), a.getAggregator());
	}
	
	String convCriterionAggName = iteration.getAggregators().getConvergenceCriterionAggregatorName();
	ConvergenceCriterion<Value> convCriterion = (ConvergenceCriterion<Value>) iteration.getAggregators().getConvergenceCriterion();
	
	List<T> currentResult = inputData;
	
	final int maxIterations = iteration.getMaximumNumberOfIterations();
	
	for (int superstep = 1; superstep <= maxIterations; superstep++) {
		
		// set the input to the current partial solution
		this.intermediateResults.put(iteration.getPartialSolution(), currentResult);

		// set the superstep number
		iterationSuperstep = superstep;

		// grab the current iteration result
		currentResult = (List<T>) execute(iteration.getNextPartialSolution(), superstep);

		// evaluate the termination criterion
		if (iteration.getTerminationCriterion() != null) {
			execute(iteration.getTerminationCriterion(), superstep);
		}
		
		// evaluate the aggregator convergence criterion
		if (convCriterion != null && convCriterionAggName != null) {
			Value v = aggregators.get(convCriterionAggName).getAggregate();
			if (convCriterion.isConverged(superstep, v)) {
				break;
			}
		}
		
		// clear the dynamic results
		for (Operator<?> o : dynamics) {
			intermediateResults.remove(o);
		}
		
		// set the previous iteration's aggregates and reset the aggregators
		for (Map.Entry<String, Aggregator<?>> e : aggregators.entrySet()) {
			previousAggregates.put(e.getKey(), e.getValue().getAggregate());
			e.getValue().reset();
		}
	}
	
	previousAggregates.clear();
	aggregators.clear();
	
	return currentResult;
}
 
Example #14
Source File: BulkIterationNode.java    From Flink-CEPplus with Apache License 2.0
public BulkIterationBase<?> getIterationContract() {
	return (BulkIterationBase<?>) getOperator();
}
 
Example #15
Source File: ChainedTerminationCriterionDriver.java    From Flink-CEPplus with Apache License 2.0
@Override
public void setup(AbstractInvokable parent) {
	agg = ((IterationRuntimeContext) getUdfRuntimeContext()).getIterationAggregator(BulkIterationBase.TERMINATION_CRITERION_AGGREGATOR_NAME);
}
 
Example #16
Source File: CollectionExecutor.java    From Flink-CEPplus with Apache License 2.0
@SuppressWarnings("unchecked")
private <T> List<T> executeBulkIteration(BulkIterationBase<?> iteration) throws Exception {
	Operator<?> inputOp = iteration.getInput();
	if (inputOp == null) {
		throw new InvalidProgramException("The iteration " + iteration.getName() + " has no input (initial partial solution).");
	}
	if (iteration.getNextPartialSolution() == null) {
		throw new InvalidProgramException("The iteration " + iteration.getName() + " has no next partial solution defined (is not closed).");
	}
	
	List<T> inputData = (List<T>) execute(inputOp);
	
	// get the operators that are iterative
	Set<Operator<?>> dynamics = new LinkedHashSet<Operator<?>>();
	DynamicPathCollector dynCollector = new DynamicPathCollector(dynamics);
	iteration.getNextPartialSolution().accept(dynCollector);
	if (iteration.getTerminationCriterion() != null) {
		iteration.getTerminationCriterion().accept(dynCollector);
	}
	
	// register the aggregators
	for (AggregatorWithName<?> a : iteration.getAggregators().getAllRegisteredAggregators()) {
		aggregators.put(a.getName(), a.getAggregator());
	}
	
	String convCriterionAggName = iteration.getAggregators().getConvergenceCriterionAggregatorName();
	ConvergenceCriterion<Value> convCriterion = (ConvergenceCriterion<Value>) iteration.getAggregators().getConvergenceCriterion();
	
	List<T> currentResult = inputData;
	
	final int maxIterations = iteration.getMaximumNumberOfIterations();
	
	for (int superstep = 1; superstep <= maxIterations; superstep++) {
		
		// set the input to the current partial solution
		this.intermediateResults.put(iteration.getPartialSolution(), currentResult);

		// set the superstep number
		iterationSuperstep = superstep;

		// grab the current iteration result
		currentResult = (List<T>) execute(iteration.getNextPartialSolution(), superstep);

		// evaluate the termination criterion
		if (iteration.getTerminationCriterion() != null) {
			execute(iteration.getTerminationCriterion(), superstep);
		}
		
		// evaluate the aggregator convergence criterion
		if (convCriterion != null && convCriterionAggName != null) {
			Value v = aggregators.get(convCriterionAggName).getAggregate();
			if (convCriterion.isConverged(superstep, v)) {
				break;
			}
		}
		
		// clear the dynamic results
		for (Operator<?> o : dynamics) {
			intermediateResults.remove(o);
		}
		
		// set the previous iteration's aggregates and reset the aggregators
		for (Map.Entry<String, Aggregator<?>> e : aggregators.entrySet()) {
			previousAggregates.put(e.getKey(), e.getValue().getAggregate());
			e.getValue().reset();
		}
	}
	
	previousAggregates.clear();
	aggregators.clear();
	
	return currentResult;
}
 
Example #17
Source File: BulkIterationTranslationTest.java    From flink with Apache License 2.0
@Test
public void testCorrectTranslation() {
	final String jobName = "Test JobName";

	final int numIterations = 13;

	final int defaultParallelism = 133;
	final int iterationParallelism = 77;

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	// ------------ construct the test program ------------------

	{
		env.setParallelism(defaultParallelism);

		@SuppressWarnings("unchecked")
		DataSet<Tuple3<Double, Long, String>> initialDataSet = env.fromElements(new Tuple3<>(3.44, 5L, "abc"));

		IterativeDataSet<Tuple3<Double, Long, String>> bulkIteration = initialDataSet.iterate(numIterations);
		bulkIteration.setParallelism(iterationParallelism);

		// test that multiple iteration consumers are supported
		DataSet<Tuple3<Double, Long, String>> identity = bulkIteration
			.map(new IdentityMapper<Tuple3<Double, Long, String>>());

		DataSet<Tuple3<Double, Long, String>> result = bulkIteration.closeWith(identity);

		result.output(new DiscardingOutputFormat<Tuple3<Double, Long, String>>());
		result.writeAsText("/dev/null");
	}

	Plan p = env.createProgramPlan(jobName);

	// ------------- validate the plan ----------------

	BulkIterationBase<?> iteration = (BulkIterationBase<?>) p.getDataSinks().iterator().next().getInput();

	assertEquals(jobName, p.getJobName());
	assertEquals(defaultParallelism, p.getDefaultParallelism());
	assertEquals(iterationParallelism, iteration.getParallelism());
}
 
Example #18
Source File: BulkIterationTranslationTest.java    From Flink-CEPplus with Apache License 2.0
@Test
public void testCorrectTranslation() {
	final String jobName = "Test JobName";

	final int numIterations = 13;

	final int defaultParallelism = 133;
	final int iterationParallelism = 77;

	ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

	// ------------ construct the test program ------------------

	{
		env.setParallelism(defaultParallelism);

		@SuppressWarnings("unchecked")
		DataSet<Tuple3<Double, Long, String>> initialDataSet = env.fromElements(new Tuple3<>(3.44, 5L, "abc"));

		IterativeDataSet<Tuple3<Double, Long, String>> bulkIteration = initialDataSet.iterate(numIterations);
		bulkIteration.setParallelism(iterationParallelism);

		// test that multiple iteration consumers are supported
		DataSet<Tuple3<Double, Long, String>> identity = bulkIteration
			.map(new IdentityMapper<Tuple3<Double, Long, String>>());

		DataSet<Tuple3<Double, Long, String>> result = bulkIteration.closeWith(identity);

		result.output(new DiscardingOutputFormat<Tuple3<Double, Long, String>>());
		result.writeAsText("/dev/null");
	}

	Plan p = env.createProgramPlan(jobName);

	// ------------- validate the plan ----------------

	BulkIterationBase<?> iteration = (BulkIterationBase<?>) p.getDataSinks().iterator().next().getInput();

	assertEquals(jobName, p.getJobName());
	assertEquals(defaultParallelism, p.getDefaultParallelism());
	assertEquals(iterationParallelism, iteration.getParallelism());
}