Java Code Examples for com.google.common.cache.CacheBuilder#maximumSize()

The following examples show how to use com.google.common.cache.CacheBuilder#maximumSize(). Each example is taken from an open source project; the source file, originating project, and license are noted above the code.
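As a quick orientation before the project examples, here is a minimal, self-contained sketch of the typical pattern (not taken from any of the projects below; the class name and the limit of 100 entries are chosen only for illustration). maximumSize(long) caps the number of entries in the cache; as the cache approaches the limit, entries that have not been used recently or often are evicted.

import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

public class MaximumSizeExample {
    public static void main(String[] args) {
        // Bound the cache to at most 100 entries. Eviction may begin
        // slightly before the limit is actually reached.
        Cache<String, String> cache = CacheBuilder.newBuilder()
                .maximumSize(100)
                .expireAfterAccess(10, TimeUnit.MINUTES)
                .build();

        cache.put("key", "value");
        // Prints "value" unless the entry has been evicted or expired.
        System.out.println(cache.getIfPresent("key"));
    }
}

Note that maximumSize(long) cannot be combined with maximumWeight(long) on the same builder, and calling it more than once throws an IllegalStateException. Most of the examples below call it conditionally so that an unset or negative configuration value leaves the cache unbounded.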
Example 1
Source File: CachingTableProvider.java    From samza with Apache License 2.0
private ReadWriteTable createDefaultCacheTable(String tableId, JavaTableConfig tableConfig) {
  long readTtlMs = Long.parseLong(tableConfig.getForTable(tableId, CachingTableDescriptor.READ_TTL_MS, "-1"));
  long writeTtlMs = Long.parseLong(tableConfig.getForTable(tableId, CachingTableDescriptor.WRITE_TTL_MS, "-1"));
  long cacheSize = Long.parseLong(tableConfig.getForTable(tableId, CachingTableDescriptor.CACHE_SIZE, "-1"));

  CacheBuilder cacheBuilder = CacheBuilder.newBuilder();
  if (readTtlMs != -1) {
    cacheBuilder.expireAfterAccess(readTtlMs, TimeUnit.MILLISECONDS);
  }
  if (writeTtlMs != -1) {
    cacheBuilder.expireAfterWrite(writeTtlMs, TimeUnit.MILLISECONDS);
  }
  if (cacheSize != -1) {
    cacheBuilder.maximumSize(cacheSize);
  }

  logger.info(String.format("Creating default cache with: readTtl=%d, writeTtl=%d, maxSize=%d",
      readTtlMs, writeTtlMs, cacheSize));

  GuavaCacheTable cacheTable = new GuavaCacheTable(tableId + "-def-cache", cacheBuilder.build());
  cacheTable.init(this.context);

  return cacheTable;
}
 
Example 2
Source File: GuavaCacheFactoryBean.java    From spring-cache-demo with GNU General Public License v2.0
@Override
public void afterPropertiesSet() {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();
    if (maximumSize != null) {
      builder.maximumSize(maximumSize);
    }
    if (expireAfterAccessInSeconds != null) {
      builder.expireAfterAccess(expireAfterAccessInSeconds, TimeUnit.SECONDS);
    }
    if (expireAfterWriteInSeconds != null) {
      builder.expireAfterWrite(expireAfterWriteInSeconds, TimeUnit.SECONDS);
    }
    
    com.google.common.cache.Cache<Object, Object> guavaCache = builder.build();
    this.cache = new GuavaCache(this.name, guavaCache, this.allowNullValues);
}
 
Example 3
Source File: PerBuildSyscallCache.java    From bazel with Apache License 2.0
public PerBuildSyscallCache build() {
  CacheBuilder<Object, Object> statCacheBuilder = CacheBuilder.newBuilder();
  if (maxStats != UNSET) {
    statCacheBuilder = statCacheBuilder.maximumSize(maxStats);
  }
  CacheBuilder<Object, Object> readdirCacheBuilder = CacheBuilder.newBuilder();
  if (maxReaddirs != UNSET) {
    readdirCacheBuilder = readdirCacheBuilder.maximumSize(maxReaddirs);
  }
  if (concurrencyLevel != UNSET) {
    statCacheBuilder = statCacheBuilder.concurrencyLevel(concurrencyLevel);
    readdirCacheBuilder = readdirCacheBuilder.concurrencyLevel(concurrencyLevel);
  }
  return new PerBuildSyscallCache(statCacheBuilder.build(newStatLoader()),
      readdirCacheBuilder.build(newReaddirLoader()));
}
 
Example 4
Source File: CommonUtil.java    From ChangeSkin with MIT License
public static <K, V> ConcurrentMap<K, V> buildCache(int seconds, int maxSize) {
    CacheBuilder<Object, Object> builder = CacheBuilder.newBuilder();

    if (seconds > 0) {
        builder.expireAfterWrite(seconds, TimeUnit.SECONDS);
    }

    if (maxSize > 0) {
        builder.maximumSize(maxSize);
    }

    return builder.build(new CacheLoader<K, V>() {
        @Override
        public V load(K key) {
            throw new UnsupportedOperationException("Not supported yet.");
        }
    }).asMap();
}
 
Example 5
Source File: NetflowV9Decoder.java    From datacollector with Apache License 2.0
public static Cache<FlowSetTemplateCacheKey, FlowSetTemplate> buildTemplateCache(
    int maxTemplateCacheSize,
    int templateCacheTimeoutMs
) {
  CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
  if (maxTemplateCacheSize > 0) {
    cacheBuilder = cacheBuilder.maximumSize(maxTemplateCacheSize);
  }
  if (templateCacheTimeoutMs > 0) {
    cacheBuilder = cacheBuilder.expireAfterAccess(templateCacheTimeoutMs, TimeUnit.MILLISECONDS);
  }
  if (LOG.isTraceEnabled()) {
    cacheBuilder = cacheBuilder.removalListener((notification) -> LOG.trace(
        "Removing flow set template entry {} for cause: {} ",
        notification.getKey(),
        notification.getCause()
    ));
  }
  return cacheBuilder.build();
}
 
Example 6
Source File: HMSCache.java    From datacollector with Apache License 2.0
/**
 * Build instance of {@link HMSCache}
 * @return {@link HMSCache}
 */
@SuppressWarnings("unchecked")
public HMSCache build() throws StageException {
  Utils.checkArgument(
      !cacheTypes.isEmpty(),
      "Invalid HMSCache Configuration, Should support at least one type of cache"
  );

  Map<HMSCacheType, Cache<String, Optional<HMSCacheSupport.HMSCacheInfo>>> cacheMap = new HashMap<>();
  CacheBuilder cacheBuilder = CacheBuilder.newBuilder();

  if (maxCacheSize > 0) {
    cacheBuilder.maximumSize(maxCacheSize);
  }

  for (HMSCacheType type : cacheTypes) {
    cacheMap.put(type, cacheBuilder.build());
  }
  return new HMSCache(cacheMap);
}
 
Example 7
Source File: DefaultActiveTraceRepository.java    From pinpoint with Apache License 2.0
private ConcurrentMap<ActiveTraceHandle, ActiveTrace> createCache(int maxActiveTraceSize) {
    final CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
    cacheBuilder.concurrencyLevel(64);
    cacheBuilder.initialCapacity(maxActiveTraceSize);
    cacheBuilder.maximumSize(maxActiveTraceSize);

    final Cache<ActiveTraceHandle, ActiveTrace> localCache = cacheBuilder.build();
    return localCache.asMap();
}
 
Example 8
Source File: CacheStore.java    From attic-apex-malhar with Apache License 2.0
@Override
public void connect() throws IOException
{
  open = true;

  if (numInitCacheLines > maxCacheSize) {
    logger.warn("numInitCacheLines = {} is greater than maxCacheSize = {}, maxCacheSize was set to {}", numInitCacheLines,
        maxCacheSize, numInitCacheLines);
    maxCacheSize = numInitCacheLines;
  }

  CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
  cacheBuilder.maximumSize(maxCacheSize);

  if (entryExpiryStrategy == ExpiryType.EXPIRE_AFTER_ACCESS) {
    cacheBuilder.expireAfterAccess(entryExpiryDurationInMillis, TimeUnit.MILLISECONDS);
  } else if (entryExpiryStrategy == ExpiryType.EXPIRE_AFTER_WRITE) {
    cacheBuilder.expireAfterWrite(entryExpiryDurationInMillis, TimeUnit.MILLISECONDS);
  }
  cache = cacheBuilder.build();

  if (entryExpiryStrategy == ExpiryType.NO_EVICTION) {
    return;
  }

  this.cleanupScheduler = Executors.newScheduledThreadPool(1);
  cleanupScheduler.scheduleAtFixedRate(new Runnable()
  {
    @Override
    public void run()
    {
      cache.cleanUp();
    }
  }, cacheCleanupIntervalInMillis, cacheCleanupIntervalInMillis, TimeUnit.MILLISECONDS);
}
 
Example 9
Source File: BigQueryTarget.java    From datacollector with Apache License 2.0
@Override
@SuppressWarnings("unchecked")
public List<ConfigIssue> init() {
  List<ConfigIssue> issues = super.init();

  conf.credentials.getCredentialsProvider(getContext(), issues).ifPresent(provider -> {
    if (issues.isEmpty()) {
      try {
        Optional.ofNullable(provider.getCredentials()).ifPresent(c -> bigQuery = BigQueryDelegate.getBigquery(c, conf.credentials.projectId));
      } catch (IOException e) {
        LOG.error(Errors.BIGQUERY_05.getMessage(), e);
        issues.add(getContext().createConfigIssue(
            Groups.CREDENTIALS.name(),
            "conf.credentials.credentialsProvider",
            Errors.BIGQUERY_05
        ));
      }
    }
  });

  dataSetEval = getContext().createELEval("datasetEL");
  tableNameELEval = getContext().createELEval("tableNameEL");
  rowIdELEval = getContext().createELEval("rowIdExpression");

  CacheBuilder tableIdExistsCacheBuilder = CacheBuilder.newBuilder();
  if (conf.maxCacheSize != -1) {
    tableIdExistsCacheBuilder.maximumSize(conf.maxCacheSize);
  }

  tableIdExistsCache = tableIdExistsCacheBuilder.build(new CacheLoader<TableId, Boolean>() {
    @Override
    public Boolean load(TableId key) throws Exception {
      return bigQuery.getTable(key) != null;
    }
  });
  errorRecordHandler = new DefaultErrorRecordHandler(getContext());

  return issues;
}
 
Example 10
Source File: DefaultSimpleCache.java    From alfresco-repository with GNU Lesser General Public License v3.0
/**
 * Construct a cache using the specified capacity and name.
 * 
 * @param maxItems The cache capacity. 0 = use {@link #DEFAULT_CAPACITY}
 * @param useMaxItems Whether the maxItems value should be applied as a size-cap for the cache.
 * @param cacheName An arbitrary cache name.
 */
@SuppressWarnings("unchecked")
public DefaultSimpleCache(int maxItems, boolean useMaxItems, int ttlSecs, int maxIdleSecs, String cacheName)
{
    if (maxItems == 0)
    {
        maxItems = DEFAULT_CAPACITY;
    }
    else if (maxItems < 0)
    {
        throw new IllegalArgumentException("maxItems may not be negative, but was " + maxItems);
    }
    this.maxItems = maxItems;
    this.useMaxItems = useMaxItems;
    this.ttlSecs = ttlSecs;
    this.maxIdleSecs = maxIdleSecs;
    setBeanName(cacheName);
    
    // The map will have a bounded size determined by the maxItems member variable.
    @SuppressWarnings("rawtypes")
    CacheBuilder builder = CacheBuilder.newBuilder();
    
    if (useMaxItems)
    {
        builder.maximumSize(maxItems);
    }
    if (ttlSecs > 0)
    {
        builder.expireAfterWrite(ttlSecs, TimeUnit.SECONDS);
    }
    if (maxIdleSecs > 0)
    {
        builder.expireAfterAccess(maxIdleSecs, TimeUnit.SECONDS);
    }
    builder.concurrencyLevel(32);
    
    cache = (Cache<K, AbstractMap.SimpleImmutableEntry<K, V>>) builder.build();
}
 
Example 11
Source File: JaninoRelMetadataProvider.java    From Bats with Apache License 2.0
private static <K, V> CacheBuilder<K, V> maxSize(CacheBuilder<K, V> builder,
    int size) {
  if (size >= 0) {
    builder.maximumSize(size);
  }
  return builder;
}
 
Example 12
Source File: JaninoRelMetadataProvider.java    From calcite with Apache License 2.0
private static <K, V> CacheBuilder<K, V> maxSize(CacheBuilder<K, V> builder,
    int size) {
  if (size >= 0) {
    builder.maximumSize(size);
  }
  return builder;
}
 
Example 13
Source File: GuavaCacheFactory.java    From joyrpc with Apache License 2.0
@Override
public <K, V> Cache<K, V> build(final String name, final CacheConfig<K, V> config) {
    CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
    if (config.getExpireAfterWrite() > 0) {
        cacheBuilder.expireAfterWrite(config.getExpireAfterWrite(), TimeUnit.MILLISECONDS);
    }
    cacheBuilder.maximumSize(config.getCapacity() > 0 ? config.getCapacity() : Long.MAX_VALUE);
    com.google.common.cache.Cache<K, CacheObject<V>> cache = cacheBuilder.build();
    return new GuavaCache<>(cache, config);
}
 
Example 14
Source File: SimpleCache.java    From pinpoint with Apache License 2.0
private ConcurrentMap<T, Result> createCache(int maxCacheSize) {
    final CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
    cacheBuilder.concurrencyLevel(64);
    cacheBuilder.initialCapacity(maxCacheSize);
    cacheBuilder.maximumSize(maxCacheSize);
    Cache<T, Result> localCache = cacheBuilder.build();
    ConcurrentMap<T, Result> cache = localCache.asMap();
    return cache;
}
 
Example 15
Source File: JaninoRelMetadataProvider.java    From Quicksql with MIT License
private static <K, V> CacheBuilder<K, V> maxSize(CacheBuilder<K, V> builder,
    int size) {
  if (size >= 0) {
    builder.maximumSize(size);
  }
  return builder;
}
 
Example 16
Source File: RateLimiter.java    From glowroot with Apache License 2.0
public RateLimiter(int maximumSize, boolean recordStats) {
    CacheBuilder<Object, Object> cache = CacheBuilder.newBuilder()
            .expireAfterWrite(1, DAYS);
    if (maximumSize != NO_MAXIMUM_SIZE) {
        cache.maximumSize(maximumSize);
    }
    if (recordStats) {
        cache.recordStats();
    }
    acquiredRecently = cache.build();
}
 
Example 17
Source File: CachingHiveMetastore.java    From presto with Apache License 2.0
private static CacheBuilder<Object, Object> newCacheBuilder(OptionalLong expiresAfterWriteMillis, OptionalLong refreshMillis, long maximumSize)
{
    CacheBuilder<Object, Object> cacheBuilder = CacheBuilder.newBuilder();
    if (expiresAfterWriteMillis.isPresent()) {
        cacheBuilder = cacheBuilder.expireAfterWrite(expiresAfterWriteMillis.getAsLong(), MILLISECONDS);
    }
    if (refreshMillis.isPresent() && (expiresAfterWriteMillis.isEmpty() || expiresAfterWriteMillis.getAsLong() > refreshMillis.getAsLong())) {
        cacheBuilder = cacheBuilder.refreshAfterWrite(refreshMillis.getAsLong(), MILLISECONDS);
    }
    cacheBuilder = cacheBuilder.maximumSize(maximumSize);
    return cacheBuilder;
}
 
Example 18
Source File: IrisSettings.java    From arcusplatform with Apache License 2.0
public static CacheBuilder<Object,Object> configurableCacheBuilder(String base, CacheBuilder<Object,Object> builder) {
   // If there is a full cache specification then that overrides everything
   String spec = IrisSettings.getStringProperty(base + ".spec", "");
   if (!spec.isEmpty()) {
      return CacheBuilder.from(spec);
   }

   CacheBuilder<Object,Object> bld = builder;
   int concurrency = IrisSettings.getIntegerProperty(base + ".concurrency", -1);
   if (concurrency > 0) {
      bld = bld.concurrencyLevel(concurrency);
   }

   long write = IrisSettings.getLongProperty(base + ".expire.write", -1L);
   if (write > 0) {
      bld = bld.expireAfterWrite(write, TimeUnit.MILLISECONDS);
   }

   long access = IrisSettings.getLongProperty(base + ".expire.access", -1L);
   if (access > 0) {
      bld = bld.expireAfterAccess(access, TimeUnit.MILLISECONDS);
   }

   long refresh = IrisSettings.getLongProperty(base + ".refresh.write", -1L);
   if (refresh > 0) {
      bld = bld.refreshAfterWrite(refresh, TimeUnit.MILLISECONDS);
   }

   int initsz = IrisSettings.getIntegerProperty(base + ".initial.capacity", -1);
   if (initsz > 0) {
      bld = bld.initialCapacity(initsz);
   }

   int maxsz = IrisSettings.getIntegerProperty(base + ".max.size", -1);
   if (maxsz > 0) {
      bld = bld.maximumSize(maxsz);
   }

   boolean soft = IrisSettings.getBooleanProperty(base + ".soft.values", false);
   if (soft) {
      bld = bld.softValues();
   }

   return bld;
}
 
Example 19
Source File: ScriptService.java    From Elasticsearch with Apache License 2.0
@Inject
public ScriptService(Settings settings, Environment env, Set<ScriptEngineService> scriptEngines,
                     ResourceWatcherService resourceWatcherService, ScriptContextRegistry scriptContextRegistry) throws IOException {
    super(settings);
    this.parseFieldMatcher = new ParseFieldMatcher(settings);
    if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) {
        throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. \n" +
                "Dynamic scripts can be enabled for all languages and all operations by replacing `script.disable_dynamic: false` with `script.inline: on` and `script.indexed: on` in elasticsearch.yml");
    }

    this.scriptEngines = scriptEngines;
    this.scriptContextRegistry = scriptContextRegistry;
    int cacheMaxSize = settings.getAsInt(SCRIPT_CACHE_SIZE_SETTING, SCRIPT_CACHE_SIZE_DEFAULT);
    TimeValue cacheExpire = settings.getAsTime(SCRIPT_CACHE_EXPIRE_SETTING, null);
    logger.debug("using script cache with max_size [{}], expire [{}]", cacheMaxSize, cacheExpire);

    this.defaultLang = settings.get(DEFAULT_SCRIPTING_LANGUAGE_SETTING, DEFAULT_LANG);

    CacheBuilder cacheBuilder = CacheBuilder.newBuilder();
    if (cacheMaxSize >= 0) {
        cacheBuilder.maximumSize(cacheMaxSize);
    }
    if (cacheExpire != null) {
        cacheBuilder.expireAfterAccess(cacheExpire.nanos(), TimeUnit.NANOSECONDS);
    }
    this.cache = cacheBuilder.removalListener(new ScriptCacheRemovalListener()).build();

    ImmutableMap.Builder<String, ScriptEngineService> enginesByLangBuilder = ImmutableMap.builder();
    ImmutableMap.Builder<String, ScriptEngineService> enginesByExtBuilder = ImmutableMap.builder();
    for (ScriptEngineService scriptEngine : scriptEngines) {
        for (String type : scriptEngine.types()) {
            enginesByLangBuilder.put(type, scriptEngine);
        }
        for (String ext : scriptEngine.extensions()) {
            enginesByExtBuilder.put(ext, scriptEngine);
        }
    }
    this.scriptEnginesByLang = enginesByLangBuilder.build();
    this.scriptEnginesByExt = enginesByExtBuilder.build();

    this.scriptModes = new ScriptModes(this.scriptEnginesByLang, scriptContextRegistry, settings);

    // add file watcher for static scripts
    scriptsDirectory = env.scriptsFile();
    if (logger.isTraceEnabled()) {
        logger.trace("Using scripts directory [{}] ", scriptsDirectory);
    }
    FileWatcher fileWatcher = new FileWatcher(scriptsDirectory);
    fileWatcher.addListener(new ScriptChangesListener());

    if (settings.getAsBoolean(SCRIPT_AUTO_RELOAD_ENABLED_SETTING, true)) {
        // automatic reload is enabled - register scripts
        resourceWatcherService.add(fileWatcher);
    } else {
        // automatic reload is disabled, just load scripts once
        fileWatcher.init();
    }
}
 
Example 20
Source File: AppendTrieDictionary.java    From kylin with Apache License 2.0
public void init(String baseDir) throws IOException {
    this.baseDir = convertToAbsolutePath(baseDir);
    final GlobalDictStore globalDictStore = new GlobalDictHDFSStore(this.baseDir);
    Long[] versions = globalDictStore.listAllVersions();

    if (versions.length == 0) {
        this.metadata = new GlobalDictMetadata(0, 0, 0, 0, null, new TreeMap<AppendDictSliceKey, String>());
        return; // for the removed SegmentAppendTrieDictBuilder
    }

    final long latestVersion = versions[versions.length - 1];
    final Path latestVersionPath = globalDictStore.getVersionDir(latestVersion);
    this.metadata = globalDictStore.getMetadata(latestVersion);
    this.bytesConvert = metadata.bytesConverter;

    // see: https://github.com/google/guava/wiki/CachesExplained
    this.evictionThreshold = KylinConfig.getInstanceFromEnv().getDictionarySliceEvicationThreshold();
    int cacheMaximumSize = KylinConfig.getInstanceFromEnv().getCachedDictMaxSize();
    CacheBuilder cacheBuilder = CacheBuilder.newBuilder().softValues();

    // To be compatible with Guava 11
    boolean methodExists = methodExistsInClass(CacheBuilder.class, "recordStats");
    if (methodExists) {
        cacheBuilder = cacheBuilder.recordStats();
    }
    if (cacheMaximumSize > 0) {
        cacheBuilder = cacheBuilder.maximumSize(cacheMaximumSize);
        logger.info("Set dict cache maximum size to " + cacheMaximumSize);
    }
    this.dictCache = cacheBuilder
            .removalListener(new RemovalListener<AppendDictSliceKey, AppendDictSlice>() {
                @Override
                public void onRemoval(RemovalNotification<AppendDictSliceKey, AppendDictSlice> notification) {
                    logger.info("Evict slice with key {} and value {} caused by {}, size {}/{}",
                            notification.getKey(), notification.getValue(), notification.getCause(),
                            dictCache.size(), metadata.sliceFileMap.size());
                }
            }).build(new CacheLoader<AppendDictSliceKey, AppendDictSlice>() {
                @Override
                public AppendDictSlice load(AppendDictSliceKey key) throws Exception {
                    AppendDictSlice slice = globalDictStore.readSlice(latestVersionPath.toString(),
                            metadata.sliceFileMap.get(key));
                    logger.trace("Load slice with key {} and value {}", key, slice);
                    return slice;
                }
            });
}