Java Code Examples for java.util.concurrent.ConcurrentHashMap.merge()

The following are Java code examples showing how to use merge() of the java.util.concurrent.ConcurrentHashMap class.
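merge(key, value, remappingFunction) associates the key with the given value if no value is currently present; otherwise it replaces the current value with the result of applying the remapping function to the old and new values, and removes the entry if that result is null. In ConcurrentHashMap the entire invocation is performed atomically. Before the project examples, here is a minimal, self-contained sketch (the class and variable names are made up for illustration) of the common word-count idiom built on merge():

import java.util.concurrent.ConcurrentHashMap;

public class MergeWordCount {
    public static void main(String[] args) {
        ConcurrentHashMap<String, Integer> counts = new ConcurrentHashMap<>();
        String[] words = {"a", "b", "a", "c", "a"};

        for (String word : words) {
            // Insert 1 if the word is absent, otherwise atomically add 1 to the current count.
            counts.merge(word, 1, Integer::sum);
        }

        System.out.println(counts); // e.g. {a=3, b=1, c=1} (iteration order may vary)
    }
}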
Example 1
Project: openjdk-jdk10   File: ConcurrentHashMap8Test.java
/**
 * merge removes when the given key is present and function returns null
 */
public void testMerge3() {
    ConcurrentHashMap map = map5();
    map.merge(one, "Y", (x, y) -> null);
    assertFalse(map.containsKey(one));
}
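This test exercises the removal branch of merge(): the key one is already present, so the remapping function is applied to the existing value and "Y", and because it returns null the mapping is removed. Note that the remapping function is only invoked when the key already has a value; if one were absent, merge(one, "Y", (x, y) -> null) would simply insert "Y".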
 
Example 2
Project: Higher-Cloud-Computing-Project   File: KMeansDistributedClusterer.java
/** Initialize cluster centers. */
private Vector[] initClusterCenters(SparseDistributedMatrix points, int k) {
    // Initialize empty centers and point costs.
    int ptsCnt = points.rowSize();

    String cacheName = ((SparseDistributedMatrixStorage)points.getStorage()).cacheName();

    // Initialize the first center to a random point.
    Vector sample = localCopyOf(points.viewRow(rnd.nextInt(ptsCnt)));

    List<Vector> centers = new ArrayList<>();
    List<Vector> newCenters = new ArrayList<>();
    newCenters.add(sample);
    centers.add(sample);

    final ConcurrentHashMap<Integer, Double> costs = new ConcurrentHashMap<>();

    // On each step, sample 2 * k points on average with probability proportional
    // to their squared distance from the centers. Note that only distances between points
    // and new centers are computed in each iteration.
    int step = 0;
    UUID uid = points.getUUID();

    while (step < initSteps) {
        // We assume here that costs can fit into memory of one node.
        ConcurrentHashMap<Integer, Double> newCosts = getNewCosts(points, newCenters, cacheName);

        // Merge costs with new costs.
        for (Integer ind : newCosts.keySet())
            costs.merge(ind, newCosts.get(ind), Math::min);

        double sumCosts = costs.values().stream().mapToDouble(Double::valueOf).sum();

        newCenters = getNewCenters(k, costs, uid, sumCosts, cacheName);
        centers.addAll(newCenters);

        step++;
    }

    List<Vector> distinctCenters = centers.stream().distinct().collect(Collectors.toList());

    if (distinctCenters.size() <= k)
        return distinctCenters.toArray(new Vector[] {});
    else {
        // Finally, we might have a set of more than k distinct candidate centers; weight each
        // candidate by the number of points in the dataset mapping to it and run a local k-means++
        // on the weighted centers to pick k of them
        ConcurrentHashMap<Integer, Integer> centerInd2Weight = weightCenters(uid, distinctCenters, cacheName);

        List<Double> weights = new ArrayList<>(centerInd2Weight.size());

        for (int i = 0; i < distinctCenters.size(); i++)
            weights.add(i, Double.valueOf(centerInd2Weight.getOrDefault(i, 0)));

        DenseLocalOnHeapMatrix dCenters = MatrixUtil.fromList(distinctCenters, true);

        return new KMeansLocalClusterer(getDistanceMeasure(), 30, seed).cluster(dCenters, k, weights).centers();
    }
}
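Here merge() is used with Math::min to keep, for each point index, the smallest squared distance seen so far across all sampling steps: each call either inserts the new cost or atomically replaces the stored cost with the minimum of the old and new values. A simplified, self-contained sketch of just that merging step (the map and method names are illustrative, not from the Ignite source) might look like:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class CostMergeSketch {
    // Keep the per-key minimum across the existing costs and a batch of newly computed costs.
    static void mergeCosts(ConcurrentHashMap<Integer, Double> costs, Map<Integer, Double> newCosts) {
        for (Map.Entry<Integer, Double> e : newCosts.entrySet())
            costs.merge(e.getKey(), e.getValue(), Math::min);
    }

    public static void main(String[] args) {
        ConcurrentHashMap<Integer, Double> costs = new ConcurrentHashMap<>();
        costs.put(0, 4.0);
        costs.put(1, 2.0);

        mergeCosts(costs, Map.of(0, 1.5, 2, 3.0));

        System.out.println(costs); // per-key minima, e.g. {0=1.5, 1=2.0, 2=3.0}
    }
}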
 
Example 3
Project: Higher-Cloud-Computing-Project   File: FuzzyCMeansDistributedClusterer.java
/**
 * Choose k primary centers from source points.
 *
 * @param points Matrix with source points.
 * @param k Number of centers.
 * @return Array of primary centers.
 */
private Vector[] initializeCenters(SparseDistributedMatrix points, int k) {
    int pointsNum = points.rowSize();

    Vector firstCenter = points.viewRow(rnd.nextInt(pointsNum));

    List<Vector> centers = new ArrayList<>();
    List<Vector> newCenters = new ArrayList<>();

    centers.add(firstCenter);
    newCenters.add(firstCenter);

    ConcurrentHashMap<Integer, Double> costs = new ConcurrentHashMap<>();

    int step = 0;
    UUID uuid = points.getUUID();
    String cacheName = ((SparseDistributedMatrixStorage) points.getStorage()).cacheName();

    while(step < initSteps) {
        ConcurrentHashMap<Integer, Double> newCosts = getNewCosts(cacheName, uuid, newCenters);

        for (Integer key : newCosts.keySet())
            costs.merge(key, newCosts.get(key), Math::min);

        double costsSum = costs.values().stream().mapToDouble(Double::valueOf).sum();

        newCenters = getNewCenters(cacheName, uuid, costs, costsSum, k);
        centers.addAll(newCenters);

        step++;
    }

    return chooseKCenters(cacheName, uuid, centers, k);
}
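This initializer follows the same pattern as Example 2: costs.merge(key, newCosts.get(key), Math::min) keeps the smallest cost recorded for each point across the sampling rounds, and because merge() is atomic per key the accumulation would remain correct even if several threads merged partial cost maps into the same ConcurrentHashMap. Iterating newCosts.entrySet() instead of keySet() would also avoid the extra get() lookup per key.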