Java Code Examples for java.util.TreeMap#clear()

The following examples show how to use java.util.TreeMap#clear(). They are extracted from open source projects; where available, the originating project, source file, and license are noted above each example.
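Before the project examples, here is a minimal, self-contained sketch of the call itself (the class and variable names are illustrative only, not taken from any of the projects below). clear() removes every mapping from the map while leaving the map object itself usable.

import java.util.TreeMap;

public class TreeMapClearDemo {
    public static void main(String[] args) {
        TreeMap<String, Integer> scores = new TreeMap<>();
        scores.put("alice", 3);
        scores.put("bob", 5);

        // clear() removes all mappings; the (now empty) map remains usable
        scores.clear();
        System.out.println(scores.size());     // 0
        System.out.println(scores.isEmpty());  // true

        // the same instance can be refilled afterwards
        scores.put("carol", 7);
        System.out.println(scores);            // {carol=7}
    }
}
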
Example 1
private void test32Collision(String message, Method method, Object[] datas, Collection<Object> hashes, TreeMap<Integer, Integer> counts) throws Exception {
    double size = 1000D;
    double step = Integer.MAX_VALUE;
    step -= Integer.MIN_VALUE;
    step /= size;

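    // partition the 32-bit hash range into 1,000 buckets, keyed by each bucket's upper bound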
    for (int index = 0; index < size; index++) {
        counts.put((int) (Integer.MAX_VALUE - step * index), 0);
    }

    long time = System.currentTimeMillis();
    int collision = 0;
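    // hash each input, bump the bucket whose key lies just above the hash value,
    // and count duplicate hashes as collisions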
    for (Object data : datas) {
        Integer hash = Integer.class.cast(method.invoke(null, data));
        Entry<Integer, Integer> term = counts.higherEntry(hash);
        counts.put(term.getKey(), term.getValue() + 1);
        if (!hashes.add(hash)) {
            collision++;
        }
    }

    message = StringUtility.format("{} used algorithm [{}]: {} hashes, {} collisions, variance {}, {} ms", message, method.getName(), datas.length, collision, (long) getVariance(counts.values()), System.currentTimeMillis() - time);
    logger.debug(message);
    hashes.clear();
    counts.clear();
}
 
Example 2
private void test64Collision(String message, Method method, Object[] datas, Collection<Object> hashes, TreeMap<Long, Integer> counts) throws Exception {
    double size = 1000D;
    double step = Long.MAX_VALUE;
    step -= Long.MIN_VALUE;
    step /= size;

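    // partition the 64-bit hash range into 1,000 buckets, keyed by each bucket's upper bound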
    for (int index = 0; index < size; index++) {
        counts.put((long) (Long.MAX_VALUE - step * index), 0);
    }

    long time = System.currentTimeMillis();
    int collision = 0;
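    // hash each input, bump the bucket whose key lies just above the hash value,
    // and count duplicate hashes as collisions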
    for (Object data : datas) {
        Long hash = Long.class.cast(method.invoke(null, data.toString()));
        Entry<Long, Integer> term = counts.higherEntry(hash);
        counts.put(term.getKey(), term.getValue() + 1);
        if (!hashes.add(hash)) {
            collision++;
        }
    }

    message = StringUtility.format("{} used algorithm [{}]: {} hashes, {} collisions, variance {}, {} ms", message, method.getName(), datas.length, collision, (long) getVariance(counts.values()), System.currentTimeMillis() - time);
    logger.debug(message);
    hashes.clear();
    counts.clear();
}
 
Example 3
Source Project: MiBandDecompiled   File: ai.java   License: Apache License 2.0
private static void a(List list)
{
    if (list == null || list.size() <= 0)
    {
        return;
    }
    HashMap hashmap = new HashMap();
    for (int i1 = 0; i1 < list.size(); i1++)
    {
        ScanResult scanresult = (ScanResult)list.get(i1);
        if (scanresult.SSID == null)
        {
            scanresult.SSID = "null";
        }
        hashmap.put(Integer.valueOf(scanresult.level), scanresult);
    }

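    // re-sort the scan results by signal level in descending order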
    TreeMap treemap = new TreeMap(Collections.reverseOrder());
    treemap.putAll(hashmap);
    list.clear();
    for (Object level : treemap.keySet())
    {
        list.add(treemap.get(level));
    }
    hashmap.clear();
    treemap.clear();
}
 
Example 4
Source Project: OSPREY3   File: BufferPool.java   License: GNU General Public License v2.0
public BufferPool(BufferFactory<T> factory, BufferExpander<T> expand) {
	
	this.factory = factory;
	this.expand = expand;
	this.buffers = new TreeMap<>();
	
	// setup the cleaner
	// NOTE: copy the buffers ref, so the cleaner doesn't hold a strong reference to this
	final TreeMap<Integer,List<CUBuffer<T>>> buffers = this.buffers;
	cleaner = () -> {
		for (List<CUBuffer<T>> bufs : buffers.values()) {
			for (CUBuffer<T> buf : bufs) {
				buf.cleanup();
			}
		}
		buffers.clear();
	};
	Cleaner.addCleaner(this, cleaner);
}
 
Example 5
Source Project: siddhi   File: IndexEventHolder.java   License: Apache License 2.0
@Override
public void deleteAll() {
    if (isOperationLogEnabled) {
        if (!isFullSnapshot()) {
            operationChangeLog.add(new Operation(CLEAR));
        } else {
            operationChangeLog.clear();
            forceFullSnapshot = true;
        }
    }
    if (primaryKeyData != null) {
        primaryKeyData.clear();
    }
    if (indexData != null) {
        for (TreeMap<Object, Set<StreamEvent>> aIndexedData : indexData.values()) {
            aIndexedData.clear();
        }
    }
}
 
Example 6
/**
 * Remove all mappings from this map.
 */
public void clear() {
    if (fast) {
        synchronized (this) {
            map = new TreeMap();
        }
    } else {
        synchronized (map) {
            map.clear();
        }
    }
}
 
Example 7
Source Project: util4j   File: NMapConvert.java   License: Apache License 2.0
/**
 * Converts an NMap into plain Objects and writes them into the target map.
 * The target map is cleared first.
 * @param nmap
 * @param map
 */
public final void toTreeMap(NMap nmap,TreeMap<Object,Object> map)
{
	map.clear();
	Set<Entry<NType<?>, NType<?>>> set=nmap.entrySet();
	for(Entry<NType<?>, NType<?>> kv:set)
	{
		map.put(toTreeMapObject(kv.getKey()),toTreeMapObject(kv.getValue()));
	}
}
 
Example 8
/**
 * Maps with same contents are equal
 */
public void testEquals() {
    TreeMap map1 = map5();
    TreeMap map2 = map5();
    assertEquals(map1, map2);
    assertEquals(map2, map1);
    map1.clear();
    assertFalse(map1.equals(map2));
    assertFalse(map2.equals(map1));
}
 
Example 9
@Override
public void close()
{
	synchronized (mLock)
	{
		for (final Map.Entry<Integer, TreeMap<Long, SwitchableSoftReference<Volume>>> lTimeLineForChannelEntry : mChannelToVolumeListsMap.entrySet())
		{
			final TreeMap<Long, SwitchableSoftReference<Volume>> lTimeLineTreeMap = lTimeLineForChannelEntry.getValue();

			for (final Map.Entry<Long, SwitchableSoftReference<Volume>> lTimePointEntry : lTimeLineTreeMap.entrySet())
			{
				final SwitchableSoftReference<Volume> lVolumeSoftReference = lTimePointEntry.getValue();
				final Volume lVolume = lVolumeSoftReference.get();
				if (lVolume != null)
					lVolume.close();
				lVolumeSoftReference.soften();
			}

			lTimeLineTreeMap.clear();
		}
		mChannelToVolumeListsMap.clear();
		mChannelToVolumeListsMap = null;

		mAvailableChannels.clear();
	}

}
 
Example 10
Source Project: openmeetings   File: BackupImport.java   License: Apache License 2.0
private <T extends BaseFileItem> void saveTree(
		File baseDir
		, String fileName
		, String listNodeName
		, String nodeName
		, Class<T> clazz
		, Map<Long, Long> folders
		, Consumer<T> save
		)
{
	TreeMap<Long, T> items = new TreeMap<>();
	readList(baseDir, fileName, listNodeName, nodeName, clazz, f -> {
		items.put(f.getId(), f);
	}, false);
	FileTree<T> tree = new FileTree<>();
	TreeMap<Long, T> remain = new TreeMap<>();
	int counter = items.size(); //max iterations
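	// retry entries that could not be attached to the tree yet; the counter bounds the number of passes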
	while (counter > 0 && !items.isEmpty()) {
		Entry<Long, T> e = items.pollFirstEntry();
		if (e == null) {
			break;
		} else {
			if (!tree.add(e.getValue())) {
				remain.put(e.getKey(), e.getValue());
			}
		}
		if (items.isEmpty()) {
			counter = Math.min(counter - 1, remain.size());
			items.putAll(remain);
			remain.clear();
		}
	}
	remain.entrySet().forEach(e -> log.warn("Dangling file/recording: {}", e.getValue()));
	tree.process(f -> isInvalidFile(f, folders), save);
}
 
Example 11
Source Project: jelectrum   File: LobstackTest.java   License: MIT License
@Test
public void testPutAllLarge()
  throws Exception
{
  Lobstack ls = openStack("test_large"); 

  Random rnd = new Random();

  TreeMap<String, ByteBuffer> insert_map = new TreeMap<String, ByteBuffer>();

  for(int j=0; j<1; j++)
  {
    for(int i=0; i<8192; i++)
    {
      String key = "" + rnd.nextInt(10000000);
      while(key.length() < 8) key = "0" + key;
      byte[] buff = new byte[1024];
      rnd.nextBytes(buff);
      insert_map.put("random_put_all:" + key, ByteBuffer.wrap(buff));
    }
    ls.putAll(insert_map);
    insert_map.clear();
  }

  ls.printTimeReport(System.out);

}
 
Example 12
@Test
public void putEvictMetricsFromCacheSlicesMerging() throws Exception {
  long cacheSliceIntervalMillis = 30000L;

  long startTime = getRoundedCheckPointTimeMillis(System.currentTimeMillis(), cacheSliceIntervalMillis);

  long seconds = 1000;
  TreeMap<Long, Double> metricValues = new TreeMap<Long, Double>();
  /*

  0        +30s      +60s
  |         |         |
   (1)(2)(3) (4)(5)(6)  h1

  */
  // Case 1 : data points are distributed equally, no values are lost, single host.
  metricValues.put(startTime + 4*seconds, 1.0);
  metricValues.put(startTime + 14*seconds, 2.0);
  metricValues.put(startTime + 24*seconds, 3.0);
  metricValues.put(startTime + 34*seconds, 4.0);
  metricValues.put(startTime + 44*seconds, 5.0);
  metricValues.put(startTime + 54*seconds, 6.0);

  TimelineMetric timelineMetric = new TimelineMetric("metric1", "host1", "app1", "instance1");
  timelineMetric.setStartTime(metricValues.firstKey());
  timelineMetric.addMetricValues(metricValues);

  Collection<TimelineMetric> timelineMetrics = new ArrayList<>();
  timelineMetrics.add(timelineMetric);
  timelineMetricsIgniteCache.putMetrics(timelineMetrics);
  Map<TimelineClusterMetric, MetricClusterAggregate> aggregateMap = timelineMetricsIgniteCache.evictMetricAggregates(startTime, startTime + 120*seconds);

  Assert.assertEquals(aggregateMap.size(), 2);
  TimelineClusterMetric timelineClusterMetric = new TimelineClusterMetric(timelineMetric.getMetricName(),
    timelineMetric.getAppId(), timelineMetric.getInstanceId(), startTime + 30*seconds);

  Assert.assertTrue(aggregateMap.containsKey(timelineClusterMetric));
  Assert.assertEquals(2.0, aggregateMap.get(timelineClusterMetric).getSum());

  timelineClusterMetric.setTimestamp(startTime + 2*30*seconds);
  Assert.assertTrue(aggregateMap.containsKey(timelineClusterMetric));
  Assert.assertEquals(5.0, aggregateMap.get(timelineClusterMetric).getSum());

  metricValues.clear();
  timelineMetrics.clear();

  /*

  0        +30s      +60s
  |         |         |
   (1)(2)(3) (4)(5)(6)   h1, h2

  */
  // Case 2 : data points are distributed equally, no values are lost, two hosts.
  metricValues.put(startTime + 4*seconds, 1.0);
  metricValues.put(startTime + 14*seconds, 2.0);
  metricValues.put(startTime + 24*seconds, 3.0);
  metricValues.put(startTime + 34*seconds, 4.0);
  metricValues.put(startTime + 44*seconds, 5.0);
  metricValues.put(startTime + 54*seconds, 6.0);

  timelineMetric = new TimelineMetric("metric1", "host1", "app1", "instance1");
  timelineMetric.setMetricValues(metricValues);

  metricValues = new TreeMap<>();
  metricValues.put(startTime + 5*seconds, 2.0);
  metricValues.put(startTime + 15*seconds, 4.0);
  metricValues.put(startTime + 25*seconds, 6.0);
  metricValues.put(startTime + 35*seconds, 8.0);
  metricValues.put(startTime + 45*seconds, 10.0);
  metricValues.put(startTime + 55*seconds, 12.0);
  TimelineMetric timelineMetric2 = new TimelineMetric("metric1", "host2", "app1", "instance1");
  timelineMetric2.setMetricValues(metricValues);

  timelineMetrics = new ArrayList<>();
  timelineMetrics.add(timelineMetric);
  timelineMetrics.add(timelineMetric2);
  timelineMetricsIgniteCache.putMetrics(timelineMetrics);
  aggregateMap = timelineMetricsIgniteCache.evictMetricAggregates(startTime, startTime + 120*seconds);

  Assert.assertEquals(aggregateMap.size(), 2);
  timelineClusterMetric = new TimelineClusterMetric(timelineMetric.getMetricName(),
    timelineMetric.getAppId(), timelineMetric.getInstanceId(), startTime + 30*seconds);

  Assert.assertTrue(aggregateMap.containsKey(timelineClusterMetric));
  Assert.assertEquals(6.0, aggregateMap.get(timelineClusterMetric).getSum());

  timelineClusterMetric.setTimestamp(startTime + 2*30*seconds);
  Assert.assertTrue(aggregateMap.containsKey(timelineClusterMetric));
  Assert.assertEquals(15.0, aggregateMap.get(timelineClusterMetric).getSum());

  metricValues.clear();
  timelineMetrics.clear();

  Assert.assertEquals(0d, timelineMetricsIgniteCache.getPointInTimeCacheMetrics().get("Cluster_KeySize"));
}
 
Example 13
Source Project: mini2Dx   File: ByteTreeMapTest.java   License: Apache License 2.0
private void clear(TreeMap<Byte, String> treeMap, ByteTreeMap<String> byteTreeMap) {
	treeMap.clear();
	byteTreeMap.clear();
}
 
Example 14
Source Project: mini2Dx   File: IntTreeMapTest.java   License: Apache License 2.0
private void clear(TreeMap<Integer, String> treeMap, IntTreeMap intTreeMap) {
	treeMap.clear();
	intTreeMap.clear();
}
 
Example 15
Source Project: examples   File: HDHTAppTest.java   License: Apache License 2.0
@Test
public void test() throws Exception
{
  File file = new File("target/hds2");
  FileUtils.deleteDirectory(file);

  LocalMode lma = LocalMode.newInstance();
  Configuration conf = new Configuration(false);
  conf.set("dt.operator.Store.fileStore.basePath", file.toURI().toString());
  //conf.set("dt.operator.Store.flushSize", "0");
  conf.set("dt.operator.Store.flushIntervalCount", "1");
  conf.set("dt.operator.Store.partitionCount", "2");

  lma.prepareDAG(new HDHTAppTest(), conf);
  LocalMode.Controller lc = lma.getController();
  //lc.setHeartbeatMonitoringEnabled(false);
  lc.runAsync();

  long tms = System.currentTimeMillis();
  File f0 = new File(file, "0/0-0");
  File f1 = new File(file, "1/1-0");
  File wal0 = new File(file, "0/_WAL-0");
  File wal1 = new File(file, "1/_WAL-0");

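  // wait up to 30 seconds for both partition store files to appear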
  while (System.currentTimeMillis() - tms < 30000) {
    if (f0.exists() && f1.exists()) break;
    Thread.sleep(100);
  }
  lc.shutdown();

  Assert.assertTrue("exists " + f0, f0.exists() && f0.isFile());
  Assert.assertTrue("exists " + f1, f1.exists() && f1.isFile());
  Assert.assertTrue("exists " + wal0, wal0.exists() && wal0.exists());
  Assert.assertTrue("exists " + wal1, wal1.exists() && wal1.exists());

  HDHTFileAccessFSImpl fs = new TFileImpl.DTFileImpl();

  fs.setBasePath(file.toURI().toString());
  fs.init();

  TreeMap<Slice, byte[]> data = new TreeMap<Slice, byte[]>(new HDHTReader.DefaultKeyComparator());
  fs.getReader(0, "0-0").readFully(data);
  Assert.assertArrayEquals("read key=" + new String(KEY0), DATA0.getBytes(), data.get(new Slice(KEY0)));

  data.clear();
  fs.getReader(1, "1-0").readFully(data);
  Assert.assertArrayEquals("read key=" + new String(KEY1), DATA1.getBytes(), data.get(new Slice(KEY1)));

  fs.close();
}
 
Example 16
Source Project: jelectrum   File: UtxoTrieMgr.java   License: MIT License
public synchronized void loadDB(InputStream in)
  throws java.io.IOException
{
  node_map.clear();
  last_added_block_hash = null;
  last_flush_block_hash = null;
  byte[] hash_data = new byte[32];

  DataInputStream d_in = new DataInputStream(in);
  d_in.readFully(hash_data);
  Sha256Hash read_hash = new Sha256Hash(hash_data);

  System.out.println("Reading block: " + read_hash);
  int node_count=0;

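  // buffer decoded nodes in a sorted map and flush them to db_map in batches of 1,000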
  TreeMap<String, UtxoTrieNode> save_map = new TreeMap<>();
  StatData size_info = new StatData();

  while(true)
  {
    int size = d_in.readInt();
    if (size == 0) break;
    byte[] data = new byte[size];
    d_in.readFully(data);
    UtxoTrieNode node = new UtxoTrieNode(ByteString.copyFrom(data));

    size_info.addDataPoint(size);


    save_map.put(node.getPrefix(), node);
    node_count++;
    if (node_count % 1000 == 0)
    {
      db_map.putAll(save_map);
      save_map.clear();
      System.out.print('.');
    }

  }
  db_map.putAll(save_map);
  save_map.clear();
  System.out.print('.');
  System.out.println();
  System.out.println("Saved " + node_count + " nodes");
  
  saveState(new UtxoStatus(read_hash));

  System.out.println("UTXO root hash: " + getRootHash(null));

  size_info.print("sizes", new DecimalFormat("0.0"));

}
 
Example 17
Source Project: mini2Dx   File: ShortTreeMapTest.java   License: Apache License 2.0
private void clear(TreeMap<Short, String> treeMap, ShortTreeMap<String> shortTreeMap) {
	treeMap.clear();
	shortTreeMap.clear();
}
 
Example 18
Source Project: j2objc   File: TreeMapTest.java   License: Apache License 2.0
/**
 * clear removes all pairs
 */
public void testClear() {
    TreeMap map = map5();
    map.clear();
    assertEquals(0, map.size());
}
 
Example 19
private void getPhotoList(Wandora currentAdmin, String jsonAPI, FlickrAssoc association, String relationship) throws JSONException, TopicMapException, RequestFailure, ExtractionFailure, UserCancellation {
    int totalPhotos = 0;
    int photosReceived = 0;
  
    TreeMap<String, String> args = new TreeMap();
    args.put("group_id", curGroup.ID);
    args.put("extras", "date_taken,date_upload,o_dims,geo,last_update,license,media,owner_name,tags,views");
    args.put("per_page", "" + photosPerPage);

    JSONObject result = getFlickrState().authorizedCall(jsonAPI, args, FlickrState.PermRead, currentAdmin);

    totalPhotos = FlickrUtils.searchInt(result, "photos.total");
    final int pageCount = 1 + totalPhotos / photosPerPage;
    getCurrentLogger().setProgressMax(totalPhotos);
    log("-- Getting info for " + totalPhotos + " photos in " + curGroup.Name + "'s pool.");
    for(int nextPageIndex = 2; nextPageIndex <= (pageCount + 1); ++nextPageIndex) {
        getCurrentLogger().setProgress(photosReceived);
        JSONArray photosArray = FlickrUtils.searchJSONArray(result, "photos.photo");
        int received = photosArray.length();
        log("-- -- Getting info for photos " + (photosReceived + 1) + " - " + (photosReceived + received) + " out of " + totalPhotos);

        for(int i = 0; i < received; ++i) {
            FlickrPhoto p = FlickrPhoto.makeFromPublicPhotoList(photosArray.getJSONObject(i));
            Topic photoTopic = p.makeTopic(this);
            FlickrUtils.createAssociation(currentMap, getAssociation(association), new Topic[] { photoTopic, curGroupTopic });
        }

        photosReceived += received;
        /*
        if(photosReceived >= totalPhotos || received <= perPage)
        {
            break;
        }
        */
        if(forceStop()) {
            log("-- -- Cancellation requested; finished getting info for " + photosReceived + " out of " + totalPhotos + " photos.");
            break;
        }
        
        args.clear();
        args.put("group_id", curGroup.ID);
        args.put("extras", "date_taken,date_upload,o_dims,geo,last_update,license,media,owner_name,tags,views");
        args.put("per_page", "" + photosPerPage);
        args.put("page", "" + nextPageIndex);
        result = getFlickrState().authorizedCall(jsonAPI, args, FlickrState.PermRead, currentAdmin);
    }
    
    if(photosReceived < totalPhotos) {
        log("" + (totalPhotos - photosReceived) + " photos not sent by flickr");
    }
}
 
Example 20
private void getPhotoList(Wandora currentAdmin, String jsonAPI, FlickrAssoc association, String relationship) throws JSONException, TopicMapException, RequestFailure, ExtractionFailure, UserCancellation {
    int totalPhotos = 0;
    int photosReceived = 0;

    TreeMap<String, String> args = new TreeMap();
    args.put("user_id", curPerson.ID);
    args.put("extras", "date_taken,date_upload,o_dims,geo,last_update,license,media,owner_name,tags,views");
    args.put("per_page", "" + photosPerPage);

    JSONObject result = getFlickrState().authorizedCall(jsonAPI, args, FlickrState.PermRead, currentAdmin);

    totalPhotos = FlickrUtils.searchInt(result, "photos.total");
    final int pageCount = 1 + totalPhotos / photosPerPage;
    
    getCurrentLogger().setProgressMax(totalPhotos);
    log("-- Getting info for " + totalPhotos + " photos " + relationship + " by " + curPerson.UserName + ".");
    for(int nextPageIndex = 2; nextPageIndex <= (pageCount + 1); ++nextPageIndex) {
        getCurrentLogger().setProgress(photosReceived);
        JSONArray photosArray = FlickrUtils.searchJSONArray(result, "photos.photo");
        int received = photosArray.length();
        log("-- -- Getting info for photos " + (photosReceived + 1) + " - " + (photosReceived + received) + " out of " + totalPhotos);
        photosReceived += received;

        for(int i = 0; i < received; ++i) {
            FlickrPhoto p = FlickrPhoto.makeFromPublicPhotoList(photosArray.getJSONObject(i));
            Topic photoTopic = p.makeTopic(this);
            FlickrUtils.createAssociation(currentMap, getAssociation(association), new Topic[] { photoTopic, curPersonTopic });
        }
        
        if(forceStop()) {
            log("-- -- Cancellation requested; finished getting info for " + photosReceived + " out of " + totalPhotos + " photos.");
            break;
        }
        
        args.clear();
        args.put("user_id", curPerson.ID);
        args.put("extras", "date_taken,date_upload,o_dims,geo,last_update,license,media,owner_name,tags,views");
        args.put("per_page", "" + photosPerPage);
        args.put("page", "" + nextPageIndex);
        result = getFlickrState().authorizedCall(jsonAPI, args, FlickrState.PermRead, currentAdmin);

    }
    if(photosReceived < totalPhotos) {
        log("" + (totalPhotos - photosReceived) + " photos not sent by flickr");
    }
}