Java Code Examples for java.util.Arrays#parallelSetAll()

The following examples show how to use java.util.Arrays#parallelSetAll(). They are drawn from open-source projects; the project and license for each example are noted above its code, and the original source file in that project provides full context.
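For orientation, Arrays.parallelSetAll(array, generator) fills every element of an array by applying the generator function to each index, evaluating the elements in parallel (typically on the common ForkJoinPool). Below is a minimal sketch of the two most common overloads; the class and variable names are illustrative and not taken from any of the projects listed on this page.

import java.util.Arrays;

public class ParallelSetAllDemo {
    public static void main(String[] args) {
        // primitive overload: parallelSetAll(int[], IntUnaryOperator)
        int[] squares = new int[10];
        Arrays.parallelSetAll(squares, i -> i * i);
        System.out.println(Arrays.toString(squares)); // [0, 1, 4, 9, 16, 25, 36, 49, 64, 81]

        // reference overload: parallelSetAll(T[], IntFunction<? extends T>)
        String[] labels = new String[5];
        Arrays.parallelSetAll(labels, i -> "item-" + i);
        System.out.println(Arrays.toString(labels)); // [item-0, item-1, item-2, item-3, item-4]
    }
}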
Example 1
Source File: SetAllTest.java    From hottub with GNU General Public License v2.0
@Test(dataProvider = "long")
public void testSetAllLong(String name, int size, IntToLongFunction generator, long[] expected) {
    long[] result = new long[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(long[], IntToLongFunction) case " + name + " failed.");

    // ensure fresh array
    result = new long[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(long[], IntToLongFunction) case " + name + " failed.");
}
 
Example 2
Source File: SetAllTest.java    From openjdk-8 with GNU General Public License v2.0
@Test(dataProvider = "string")
public void testSetAllString(String name, int size, IntFunction<String> generator, String[] expected) {
    String[] result = new String[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(String[], IntFunction<String>) case " + name + " failed.");

    // ensure fresh array
    result = new String[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(String[], IntFunction<String>) case " + name + " failed.");
}
 
Example 3
Source File: SetAllTest.java    From hottub with GNU General Public License v2.0
@Test(dataProvider = "int")
public void testSetAllInt(String name, int size, IntUnaryOperator generator, int[] expected) {
    int[] result = new int[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(int[], IntUnaryOperator) case " + name + " failed.");

    // ensure fresh array
    result = new int[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(int[], IntUnaryOperator) case " + name + " failed.");
}
 
Example 4
Source File: SetAllTest.java    From openjdk-jdk8u-backup with GNU General Public License v2.0
@Test(dataProvider = "string")
public void testSetAllString(String name, int size, IntFunction<String> generator, String[] expected) {
    String[] result = new String[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(String[], IntFunction<String>) case " + name + " failed.");

    // ensure fresh array
    result = new String[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(String[], IntFunction<String>) case " + name + " failed.");
}
 
Example 5
Source File: SetAllTest.java    From jdk8u_jdk with GNU General Public License v2.0
@Test(dataProvider = "int")
public void testSetAllInt(String name, int size, IntUnaryOperator generator, int[] expected) {
    int[] result = new int[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(int[], IntUnaryOperator) case " + name + " failed.");

    // ensure fresh array
    result = new int[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(int[], IntUnaryOperator) case " + name + " failed.");
}
 
Example 6
Source File: SetAllTest.java    From jdk8u60 with GNU General Public License v2.0
@Test(dataProvider = "double")
public void testSetAllDouble(String name, int size, IntToDoubleFunction generator, double[] expected) {
    double[] result = new double[size];
    Arrays.setAll(result, generator);
    assertDoubleArrayEquals(result, expected, 0.05, "setAll(double[], IntToDoubleFunction) case " + name + " failed.");

    // ensure fresh array
    result = new double[size];
    Arrays.parallelSetAll(result, generator);
    assertDoubleArrayEquals(result, expected, 0.05, "parallelSetAll(double[], IntToDoubleFunction) case " + name + " failed.");
}
 
Example 7
Source File: SetAllTest.java    From hottub with GNU General Public License v2.0
@Test(dataProvider = "string")
public void testSetAllString(String name, int size, IntFunction<String> generator, String[] expected) {
    String[] result = new String[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(String[], IntFunction<String>) case " + name + " failed.");

    // ensure fresh array
    result = new String[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(String[], IntFunction<String>) case " + name + " failed.");
}
 
Example 8
Source File: SetAllTest.java    From jdk8u_jdk with GNU General Public License v2.0
@Test(dataProvider = "double")
public void testSetAllDouble(String name, int size, IntToDoubleFunction generator, double[] expected) {
    double[] result = new double[size];
    Arrays.setAll(result, generator);
    assertDoubleArrayEquals(result, expected, 0.05, "setAll(double[], IntToDoubleFunction) case " + name + " failed.");

    // ensure fresh array
    result = new double[size];
    Arrays.parallelSetAll(result, generator);
    assertDoubleArrayEquals(result, expected, 0.05, "parallelSetAll(double[], IntToDoubleFunction) case " + name + " failed.");
}
 
Example 9
Source File: PartitionedBlock.java    From systemds with Apache License 2.0
@SuppressWarnings("unchecked")
public PartitionedBlock(T block, int blen) 
{
	//get the input frame block
	int rlen = block.getNumRows();
	int clen = block.getNumColumns();
	
	//partitioning input broadcast
	_dims = new long[]{rlen, clen};
	_blen = blen;
	int nrblks = getNumRowBlocks();
	int ncblks = getNumColumnBlocks();
	int code = CacheBlockFactory.getCode(block);
	
	try {
		_partBlocks = new CacheBlock[nrblks * ncblks];
		Arrays.parallelSetAll(_partBlocks, index -> {
			int i = index / ncblks;
			int j = index % ncblks;
			T tmp = (T) CacheBlockFactory.newInstance(code);
			return block.slice(i * _blen, Math.min((i + 1) * _blen, rlen) - 1,
				j * _blen, Math.min((j + 1) * _blen, clen) - 1, tmp);
		});
	} catch(Exception ex) {
		throw new RuntimeException("Failed partitioning of broadcast variable input.", ex);
	}

	_offset = 0;
}
 
Example 10
Source File: SetAllTest.java    From jdk8u-jdk with GNU General Public License v2.0
@Test(dataProvider = "long")
public void testSetAllLong(String name, int size, IntToLongFunction generator, long[] expected) {
    long[] result = new long[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(long[], IntToLongFunction) case " + name + " failed.");

    // ensure fresh array
    result = new long[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(long[], IntToLongFunction) case " + name + " failed.");
}
 
Example 11
Source File: SetAllTest.java    From hottub with GNU General Public License v2.0
@Test(dataProvider = "double")
public void testSetAllDouble(String name, int size, IntToDoubleFunction generator, double[] expected) {
    double[] result = new double[size];
    Arrays.setAll(result, generator);
    assertDoubleArrayEquals(result, expected, 0.05, "setAll(double[], IntToDoubleFunction) case " + name + " failed.");

    // ensure fresh array
    result = new double[size];
    Arrays.parallelSetAll(result, generator);
    assertDoubleArrayEquals(result, expected, 0.05, "parallelSetAll(double[], IntToDoubleFunction) case " + name + " failed.");
}
 
Example 12
Source File: SetAllTest.java    From openjdk-8 with GNU General Public License v2.0
@Test(dataProvider = "long")
public void testSetAllLong(String name, int size, IntToLongFunction generator, long[] expected) {
    long[] result = new long[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(long[], IntToLongFunction) case " + name + " failed.");

    // ensure fresh array
    result = new long[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(long[], IntToLongFunction) case " + name + " failed.");
}
 
Example 13
Source File: SetAllTest.java    From openjdk-jdk8u-backup with GNU General Public License v2.0
@Test(dataProvider = "int")
public void testSetAllInt(String name, int size, IntUnaryOperator generator, int[] expected) {
    int[] result = new int[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(int[], IntUnaryOperator) case " + name + " failed.");

    // ensure fresh array
    result = new int[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(int[], IntUnaryOperator) case " + name + " failed.");
}
 
Example 14
Source File: DenseBlockInt32.java    From systemds with Apache License 2.0
@Override
public DenseBlock set(DenseBlock db) {
	double[] data = db.valuesAt(0);
	//TODO investigate potential deadlocks if already in parallel setting w/ commonPool
	Arrays.parallelSetAll(_data, (i) -> UtilFunctions.toInt(data[i]));
	return this;
}
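The TODO in this example points at a real pitfall: Arrays.parallelSetAll schedules its work on the common ForkJoinPool, so calling it from code that is itself running on the common pool can tie up all of the pool's workers. A workaround sometimes used, sketched below, is to submit the call to a dedicated ForkJoinPool; note that this relies on the implementation detail that parallel operations started from within a ForkJoinPool task execute in that pool, and the class name and sizes here are illustrative rather than anything from SystemDS.

import java.util.Arrays;
import java.util.concurrent.ForkJoinPool;

public class DedicatedPoolSetAll {
    public static void main(String[] args) throws Exception {
        double[] src = new double[1_000_000];
        Arrays.parallelSetAll(src, i -> i * 0.5);

        int[] dst = new int[src.length];
        ForkJoinPool pool = new ForkJoinPool(4); // dedicated pool instead of the common pool
        try {
            // the parallelSetAll tasks are expected to run inside 'pool'
            pool.submit(() -> Arrays.parallelSetAll(dst, i -> (int) src[i])).get();
        } finally {
            pool.shutdown();
        }
        System.out.println(dst[dst.length - 1]); // 499999
    }
}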
 
Example 15
Source File: SetAllTest.java    From jdk8u-jdk with GNU General Public License v2.0
@Test(dataProvider = "string")
public void testSetAllString(String name, int size, IntFunction<String> generator, String[] expected) {
    String[] result = new String[size];
    Arrays.setAll(result, generator);
    assertEquals(result, expected, "setAll(String[], IntFunction<String>) case " + name + " failed.");

    // ensure fresh array
    result = new String[size];
    Arrays.parallelSetAll(result, generator);
    assertEquals(result, expected, "parallelSetAll(String[], IntFunction<String>) case " + name + " failed.");
}
 
Example 16
Source File: SparkExecutionContext.java    From systemds with Apache License 2.0
@SuppressWarnings("unchecked")
public PartitionedBroadcast<TensorBlock> getBroadcastForTensorObject(TensorObject to) {
	long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;

	PartitionedBroadcast<TensorBlock> bret = null;

	//reuse existing broadcast handle
	if (to.getBroadcastHandle() != null && to.getBroadcastHandle().isPartitionedBroadcastValid()) {
		bret = to.getBroadcastHandle().getPartitionedBroadcast();
	}

	//create new broadcast handle (never created, evicted)
	if (bret == null) {
		//account for overwritten invalid broadcast (e.g., evicted)
		if (to.getBroadcastHandle() != null)
			CacheableData.addBroadcastSize(-to.getBroadcastHandle().getPartitionedBroadcastSize());

		//obtain meta data for matrix
		DataCharacteristics dc = to.getDataCharacteristics();
		long[] dims = dc.getDims();
		int blen = dc.getBlocksize();

		//create partitioned matrix block and release memory consumed by input
		PartitionedBlock<TensorBlock> pmb = new PartitionedBlock<>(to.acquireReadAndRelease(), dims, blen);

		//determine coarse-grained partitioning
		int numPerPart = PartitionedBroadcast.computeBlocksPerPartition(dims, blen);
		int numParts = (int) Math.ceil((double) pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart);
		Broadcast<PartitionedBlock<TensorBlock>>[] ret = new Broadcast[numParts];

		//create coarse-grained partitioned broadcasts
		if (numParts > 1) {
			Arrays.parallelSetAll(ret, i -> createPartitionedBroadcast(pmb, numPerPart, i));
		} else { //single partition
			ret[0] = getSparkContext().broadcast(pmb);
			if (!isLocalMaster())
				pmb.clearBlocks();
		}

		bret = new PartitionedBroadcast<>(ret, to.getDataCharacteristics());
		// create the broadcast handle if the matrix or frame has never been broadcasted
		if (to.getBroadcastHandle() == null) {
			to.setBroadcastHandle(new BroadcastObject<MatrixBlock>());
		}
		to.getBroadcastHandle().setPartitionedBroadcast(bret,
				OptimizerUtils.estimatePartitionedSizeExactSparsity(to.getDataCharacteristics()));
		CacheableData.addBroadcastSize(to.getBroadcastHandle().getPartitionedBroadcastSize());
	}

	if (DMLScript.STATISTICS) {
		Statistics.accSparkBroadCastTime(System.nanoTime() - t0);
		Statistics.incSparkBroadcastCount(1);
	}

	return bret;
}
 
Example 17
Source File: SparkExecutionContext.java    From systemds with Apache License 2.0
@SuppressWarnings("unchecked")
public PartitionedBroadcast<FrameBlock> getBroadcastForFrameVariable(String varname) {
	long t0 = DMLScript.STATISTICS ? System.nanoTime() : 0;

	FrameObject fo = getFrameObject(varname);

	PartitionedBroadcast<FrameBlock> bret = null;

	//reuse existing broadcast handle
	if (fo.getBroadcastHandle() != null && fo.getBroadcastHandle().isPartitionedBroadcastValid()) {
		bret = fo.getBroadcastHandle().getPartitionedBroadcast();
	}

	//create new broadcast handle (never created, evicted)
	if (bret == null) {
		//account for overwritten invalid broadcast (e.g., evicted)
		if (fo.getBroadcastHandle() != null)
			CacheableData.addBroadcastSize(-fo.getBroadcastHandle().getPartitionedBroadcastSize());

		//obtain meta data for frame
		int blen = OptimizerUtils.getDefaultFrameSize();

		//create partitioned frame block and release memory consumed by input
		FrameBlock mb = fo.acquireRead();
		PartitionedBlock<FrameBlock> pmb = new PartitionedBlock<>(mb, blen);
		fo.release();

		//determine coarse-grained partitioning
		int numPerPart = PartitionedBroadcast.computeBlocksPerPartition(fo.getNumRows(), fo.getNumColumns(), blen);
		int numParts = (int) Math.ceil((double) pmb.getNumRowBlocks() * pmb.getNumColumnBlocks() / numPerPart);
		Broadcast<PartitionedBlock<FrameBlock>>[] ret = new Broadcast[numParts];

		//create coarse-grained partitioned broadcasts
		if (numParts > 1) {
			Arrays.parallelSetAll(ret, i -> createPartitionedBroadcast(pmb, numPerPart, i));
		} else { //single partition
			ret[0] = getSparkContext().broadcast(pmb);
			if (!isLocalMaster())
				pmb.clearBlocks();
		}
		
		bret = new PartitionedBroadcast<>(ret, new MatrixCharacteristics(
			fo.getDataCharacteristics()).setBlocksize(blen));
		if (fo.getBroadcastHandle() == null)
			fo.setBroadcastHandle(new BroadcastObject<FrameBlock>());
		
		fo.getBroadcastHandle().setPartitionedBroadcast(bret,
			OptimizerUtils.estimatePartitionedSizeExactSparsity(fo.getDataCharacteristics()));
		CacheableData.addBroadcastSize(fo.getBroadcastHandle().getPartitionedBroadcastSize());
	}

	if (DMLScript.STATISTICS) {
		Statistics.accSparkBroadCastTime(System.nanoTime() - t0);
		Statistics.incSparkBroadcastCount(1);
	}

	return bret;
}
 
Example 18
Source File: ArrayExamples.java    From java-8-lambdas-exercises with MIT License
public static double[] parallelInitialize(int size) {
    double[] values = new double[size];
    Arrays.parallelSetAll(values, i -> i);
    return values;
}
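A brief, hypothetical call site for the helper above (not part of the book's exercise code): each slot of the returned array equals its own index, so parallelInitialize(5) yields [0.0, 1.0, 2.0, 3.0, 4.0].

import java.util.Arrays;

public class ParallelInitializeUsage {
    public static void main(String[] args) {
        double[] values = ArrayExamples.parallelInitialize(5);
        System.out.println(Arrays.toString(values)); // [0.0, 1.0, 2.0, 3.0, 4.0]
    }
}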
 
Example 19
Source File: PopulationEstimator.java    From bt with Apache License 2.0
public static void main(String[] args) throws Exception {
	NumberFormat formatter = NumberFormat.getNumberInstance(Locale.GERMANY);
	formatter.setMaximumFractionDigits(30);

	int keyspaceSize = 20000000;

	PopulationEstimator estimator = new PopulationEstimator();

	// prints 160 - log2(keyspaceSize) for reference
	System.out.println(160 - Math.log(keyspaceSize) / Math.log(2));

	// fill the simulated keyspace with random keys, generated in parallel
	Key[] keyspace = new Key[keyspaceSize];
	Runnable r = () -> {
		Arrays.parallelSetAll(keyspace, i -> Key.createRandomKey());
	};
	//Arrays.sort(keyspace);

	for (int i = 0; i < 100; i++) {
		// regenerate the keyspace every 20 iterations
		if (i % 20 == 0)
			r.run();

		Key target = Key.createRandomKey();

		// order all keys by their distance to the target
		Arrays.parallelSort(keyspace, new Key.DistanceOrder(target));

		// take the closest keys and feed them to the estimator
		int sizeGoal = 8;

		TreeSet<Key> closestSet = new TreeSet<>();
		for (int j = 0; j < sizeGoal; j++)
			closestSet.add(keyspace[j]);

		//estimator.update(closestSet);
		estimator.update(closestSet, target);
	}
}