Java Code Examples for com.google.common.collect.Sets#SetView

The following examples show how to use com.google.common.collect.Sets#SetView, the unmodifiable view type returned by Guava's set operations such as Sets.union, Sets.intersection, and Sets.difference. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
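As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the behavior these snippets rely on: Sets.union, Sets.intersection, and Sets.difference each return a Sets.SetView, which is a live, unmodifiable view of the backing sets, and immutableCopy() produces a stable snapshot when the result must not change.

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.HashSet;
import java.util.Set;

public class SetViewDemo {
    public static void main(String[] args) {
        Set<String> required = new HashSet<>(ImmutableSet.of("a", "b", "c"));
        Set<String> present = ImmutableSet.of("b", "c", "d");

        // Each factory method returns a Sets.SetView: a live, unmodifiable view.
        Sets.SetView<String> missing = Sets.difference(required, present);   // contains "a"
        Sets.SetView<String> common = Sets.intersection(required, present);  // contains "b", "c"
        Sets.SetView<String> all = Sets.union(required, present);            // contains "a", "b", "c", "d"

        System.out.println(missing.isEmpty()); // false
        System.out.println(common.size());     // 2
        System.out.println(all.size());        // 4

        // The views reflect later changes to the backing sets ...
        required.add("e");
        System.out.println(missing.contains("e")); // true

        // ... so take an immutable snapshot when the result must not change.
        ImmutableSet<String> snapshot = missing.immutableCopy();
        required.add("f");
        System.out.println(snapshot.contains("f")); // false
    }
}

This live-view behavior is why several examples below call immutableCopy() on a SetView before the backing collections can change.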
Example 1
Source File: LeaseItem.java    From estatio with Apache License 2.0
@Programmatic
public boolean doRemove() {
    boolean canDelete = !isInvoicedUpon();
    if (canDelete && !getTerms().isEmpty()) {
        getTerms().first().doRemove();
    }
    if (canDelete) {
        final Sets.SetView<LeaseItemSource> itemSources = Sets.union(
                Sets.newHashSet(leaseItemSourceRepository.findByItem(this)),
                Sets.newHashSet(leaseItemSourceRepository.findBySourceItem(this)));

        for (LeaseItemSource leaseItemSource : itemSources) {
            leaseItemSource.remove();
        }
        remove(this);
        // getContainer().remove(this);
        getContainer().flush();
    }
    return canDelete;
}
 
Example 2
Source File: StoreMessageIdManager.java    From james-project with Apache License 2.0
@Override
public DeleteResult delete(List<MessageId> messageIds, MailboxSession mailboxSession) throws MailboxException {
    MessageIdMapper messageIdMapper = mailboxSessionMapperFactory.getMessageIdMapper(mailboxSession);

    List<MailboxMessage> messageList = messageIdMapper.find(messageIds, MessageMapper.FetchType.Metadata);
    ImmutableSet<MailboxId> allowedMailboxIds = getAllowedMailboxIds(mailboxSession, messageList, Right.DeleteMessages);

    ImmutableSet<MessageId> accessibleMessages = messageList.stream()
        .filter(message -> allowedMailboxIds.contains(message.getMailboxId()))
        .map(MailboxMessage::getMessageId)
        .distinct()
        .collect(Guavate.toImmutableSet());
    Sets.SetView<MessageId> nonAccessibleMessages = Sets.difference(ImmutableSet.copyOf(messageIds), accessibleMessages);

    deleteWithPreHooks(messageIdMapper, messageList, mailboxSession);

    return DeleteResult.builder()
        .addDestroyed(accessibleMessages)
        .addNotFound(nonAccessibleMessages)
        .build();
}
 
Example 3
Source File: BlockFactoryTest.java    From protobuf-jetbrains-plugin with Apache License 2.0
@Test
public void allParserRulesAreRegistered() throws Exception {
    Map<IElementType, BlockFactory.Factory> registry = BlockFactory.REGISTRY;

    Set<String> allRules = ImmutableSet.copyOf(ProtoParser.ruleNames);

    Set<String> registeredRules = new HashSet<>();
    for (IElementType type : registry.keySet()) {
        registeredRules.add(type.toString());
    }

    Sets.SetView<String> diff = Sets.difference(allRules, registeredRules);
    if (!diff.isEmpty()) {
        Assert.fail("Following rules are not registered: " + diff);
    }

}
 
Example 4
Source File: ReferenceInfos.java    From Elasticsearch with Apache License 2.0
@Override
public void clusterChanged(ClusterChangedEvent event) {
    if (!event.metaDataChanged()) {
        return;
    }

    Set<String> newCurrentSchemas = getNewCurrentSchemas(event.state().metaData());
    synchronized (schemas) {
        Sets.SetView<String> nonBuiltInSchemas = Sets.difference(schemas.keySet(), builtInSchemas.keySet());
        Set<String> deleted = Sets.difference(nonBuiltInSchemas, newCurrentSchemas).immutableCopy();
        Set<String> added = Sets.difference(newCurrentSchemas, schemas.keySet()).immutableCopy();

        for (String deletedSchema : deleted) {
            try {
                schemas.remove(deletedSchema).close();
            } catch (Exception e) {
                LOGGER.error(e.getMessage(), e);
            }
        }
        for (String addedSchema : added) {
            schemas.put(addedSchema, getCustomSchemaInfo(addedSchema));
        }
    }
}
 
Example 5
Source File: TestKit.java    From exonum-java-binding with Apache License 2.0
private void checkDeployedArtifactsAreUsed() {
  Set<ServiceArtifactId> serviceArtifactIds = services.keySet();
  Set<ServiceArtifactId> deployedArtifactIds = serviceArtifactFilenames.keySet();
  Sets.SetView<ServiceArtifactId> unusedArtifacts =
      Sets.difference(deployedArtifactIds, serviceArtifactIds);
  checkArgument(unusedArtifacts.isEmpty(),
      "Following service artifacts were deployed, but not used for service instantiation: %s",
      unusedArtifacts);
}
 
Example 6
Source File: ReasonerUtils.java    From grakn with GNU Affero General Public License v3.0
/**
 * Calculates the intersection of two multimaps by intersecting their key sets and
 * accumulating, for each shared key, the values from both operands.
 * @param m1 first operand
 * @param m2 second operand
 * @param <K> map key type
 * @param <V> map value type
 * @return multimap intersection
 */
public static <K, V> Multimap<K, V> multimapIntersection(Multimap<K, V> m1, Multimap<K, V> m2){
    Multimap<K, V> intersection = HashMultimap.create();
    Sets.SetView<K> keyIntersection = Sets.intersection(m1.keySet(), m2.keySet());
    Stream.concat(m1.entries().stream(), m2.entries().stream())
            .filter(e -> keyIntersection.contains(e.getKey()))
            .forEach(e -> intersection.put(e.getKey(), e.getValue()));
    return intersection;
}
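As a usage sketch (the inputs here are made up, and the call assumes the method above is reachable as ReasonerUtils.multimapIntersection), only entries whose key appears in both operands end up in the result:

Multimap<String, Integer> m1 = HashMultimap.create();
m1.put("a", 1);
m1.put("b", 2);

Multimap<String, Integer> m2 = HashMultimap.create();
m2.put("b", 3);
m2.put("c", 4);

// Only the shared key "b" survives; its values from both operands are accumulated.
Multimap<String, Integer> result = ReasonerUtils.multimapIntersection(m1, m2);
// result contains exactly {b=[2, 3]}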
 
Example 7
Source File: RegexMentionExtractorTest.java    From streams with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testExtraction() {
    StreamsDatum datum = new StreamsDatum(activity, "Test");
    List<StreamsDatum> result = new RegexMentionsExtractor().process(datum);
    assertThat(result.size(), is(equalTo(1)));
    Activity output = (Activity)result.get(0).getDocument();
    Set<String> extracted = (Set) ExtensionUtil.getInstance().ensureExtensions(output).get(RegexMentionsExtractor.EXTENSION_KEY);
    Sets.SetView<String> diff = Sets.difference(extracted, mentions);
    assertThat(diff.size(), is(equalTo(0)));
}
 
Example 8
Source File: FlavorDomain.java    From buck with Apache License 2.0
public Optional<Flavor> getFlavor(Set<Flavor> flavors) {
  Sets.SetView<Flavor> match = Sets.intersection(translation.keySet(), flavors);
  if (match.size() > 1) {
    throw new FlavorDomainException(
        String.format("multiple \"%s\" flavors: %s", name, Joiner.on(", ").join(match)));
  }

  return Optional.ofNullable(Iterables.getFirst(match, null));
}
 
Example 9
Source File: CoordinateSharer.java    From netcdf-java with BSD 3-Clause "New" or "Revised" License
public boolean nearlyEquals(RuntimeSmoosher that) {
  Sets.SetView<Long> common = Sets.intersection(this.coordSet, that.coordSet);
  int total = Math.min(this.runtime.getSize(), that.runtime.getSize());

  double nptsP = common.size() / (double) total;
  return (nptsP < smooshTolerence);
}
 
Example 10
Source File: RepositoryParamValidator.java    From Elasticsearch with Apache License 2.0
public Settings convertAndValidate(String type, Optional<GenericProperties> genericProperties, ParameterContext parameterContext) {
    TypeSettings typeSettings = this.typeSettings.get(type);
    if (typeSettings == null) {
        throw new IllegalArgumentException(String.format(Locale.ENGLISH, "Invalid repository type \"%s\"", type));
    }

    Map<String, SettingsApplier> allSettings = typeSettings.all();

    // create string settings applier for all dynamic settings
    Optional<GenericProperties> dynamicProperties = typeSettings.dynamicProperties(genericProperties);
    if (dynamicProperties.isPresent()) {
        // allSettings are immutable by default, copy map
        allSettings = Maps.newHashMap(allSettings);
        for (String key : dynamicProperties.get().properties().keySet()) {
            allSettings.put(key, new SettingsAppliers.StringSettingsApplier(new StringSetting(key, true)));
        }
    }

    // convert and validate all settings
    Settings settings = GenericPropertiesConverter.settingsFromProperties(
            genericProperties, parameterContext, allSettings).build();

    Set<String> names = settings.getAsMap().keySet();
    Sets.SetView<String> missingRequiredSettings = Sets.difference(typeSettings.required().keySet(), names);
    if (!missingRequiredSettings.isEmpty()) {
        throw new IllegalArgumentException(String.format(Locale.ENGLISH,
                "The following required parameters are missing to create a repository of type \"%s\": [%s]",
                type, Joiner.on(", ").join(missingRequiredSettings)));
    }

    return settings;
}
 
Example 11
Source File: Schemas.java    From crate with Apache License 2.0
@Override
public void clusterChanged(ClusterChangedEvent event) {
    if (!event.metaDataChanged()) {
        return;
    }

    Set<String> newCurrentSchemas = getNewCurrentSchemas(event.state().metaData());
    synchronized (schemas) {
        Sets.SetView<String> nonBuiltInSchemas = Sets.difference(schemas.keySet(), builtInSchemas.keySet());
        Set<String> deleted = Sets.difference(nonBuiltInSchemas, newCurrentSchemas).immutableCopy();
        Set<String> added = Sets.difference(newCurrentSchemas, schemas.keySet()).immutableCopy();

        for (String deletedSchema : deleted) {
            try {
                schemas.remove(deletedSchema).close();
            } catch (Exception e) {
                LOGGER.error(e.getMessage(), e);
            }
        }

        for (String addedSchema : added) {
            schemas.put(addedSchema, getCustomSchemaInfo(addedSchema));
        }

        // update all existing schemas
        for (SchemaInfo schemaInfo : this) {
            schemaInfo.update(event);
        }
    }
}
 
Example 12
Source File: AbstractColumnDetailChecker.java    From qconfig with MIT License
@Override
public void check(ObjectNode node) {
    Iterator<String> iterator = node.fieldNames();
    ImmutableSet<String> attributes = ImmutableSet.copyOf(iterator);

    Set<String> minAttributes = getMinAttributes();
    Sets.SetView<String> require = Sets.difference(minAttributes, attributes);
    Preconditions.checkArgument(require.isEmpty(), "[%s]还缺少属性%s", name(), require); // i.e. "[%s] is missing required attributes %s"

    Set<String> maxAttributes = getMaxAttributes();
    Sets.SetView<String> illegal = Sets.difference(attributes, maxAttributes);
    Preconditions.checkArgument(illegal.isEmpty(), "[%s]包含无效属性%s", name(), illegal); // i.e. "[%s] contains invalid attributes %s"

    Context context = doCheck(node);
    JsonNode defaultNode = node.get(TemplateContants.DEFAULT);
    if (defaultNode != null) {
        Preconditions.checkArgument(defaultNode.isTextual(), "default必须是一个文本属性"); // i.e. "default must be a textual attribute"
        String defaultText = defaultNode.asText();
        Preconditions.checkArgument(defaultText.trim().length() == defaultText.length(), "[%s]默认值前后不能有空格", name()); // i.e. "[%s] default value must not have leading or trailing spaces"
        if (!Strings.isNullOrEmpty(defaultText)) {
            Preconditions.checkArgument(context.isLegalValue(defaultText), "[%s]默认值[%s]无效", name(), defaultText); // i.e. "[%s] default value [%s] is invalid"
        }
    }
    JsonNode readonlyNode = node.get(TemplateContants.READONLY);
    if (readonlyNode != null) {
        Preconditions.checkArgument(readonlyNode.isBoolean(), "readonly必须是一个布尔属性"); // i.e. "readonly must be a boolean attribute"
        if (readonlyNode.asBoolean()) {
            Preconditions.checkArgument(defaultNode != null && !Strings.isNullOrEmpty(defaultNode.asText()), "只读只在有默认值时生效"); // i.e. "readonly only takes effect when a default value is set"
        }
    }
}
 
Example 13
Source File: NullAway.java    From NullAway with MIT License
/**
 * @param enclosingClassPath TreePath to class
 * @param state visitor state
 * @return a map from each initializer <em>i</em> to the fields known to be initialized before
 *     <em>i</em> executes
 */
private Multimap<Tree, Element> computeTree2Init(
    TreePath enclosingClassPath, VisitorState state) {
  ClassTree enclosingClass = (ClassTree) enclosingClassPath.getLeaf();
  ImmutableMultimap.Builder<Tree, Element> builder = ImmutableMultimap.builder();
  // NOTE: this set includes both instance and static fields
  Set<Element> initThusFar = new LinkedHashSet<>();
  Set<MethodTree> constructors = new LinkedHashSet<>();
  AccessPathNullnessAnalysis nullnessAnalysis = getNullnessAnalysis(state);
  // NOTE: we assume the members are returned in their syntactic order.  This has held
  // true in our testing
  for (Tree memberTree : enclosingClass.getMembers()) {
    if (memberTree instanceof VariableTree || memberTree instanceof BlockTree) {
      // putAll does not keep a reference to initThusFar, so we don't need to make a copy here
      builder.putAll(memberTree, initThusFar);
    }
    if (memberTree instanceof BlockTree) {
      BlockTree blockTree = (BlockTree) memberTree;
      // add whatever gets initialized here
      TreePath memberPath = new TreePath(enclosingClassPath, memberTree);
      if (blockTree.isStatic()) {
        initThusFar.addAll(
            nullnessAnalysis.getNonnullStaticFieldsAtExit(memberPath, state.context));
      } else {
        initThusFar.addAll(
            nullnessAnalysis.getNonnullFieldsOfReceiverAtExit(memberPath, state.context));
      }
    }
    if (memberTree instanceof MethodTree) {
      MethodTree methodTree = (MethodTree) memberTree;
      if (isConstructor(methodTree)) {
        constructors.add(methodTree);
      }
    }
  }
  // all the initializer blocks have run before any code inside a constructor
  constructors.stream().forEach((c) -> builder.putAll(c, initThusFar));
  Symbol.ClassSymbol classSymbol = ASTHelpers.getSymbol(enclosingClass);
  FieldInitEntities entities = class2Entities.get(classSymbol);
  if (entities.instanceInitializerMethods().size() == 1) {
    MethodTree initMethod = entities.instanceInitializerMethods().iterator().next();
    // collect the fields that may not be initialized by *some* constructor NC
    Set<Symbol> constructorUninitSymbols = class2ConstructorUninit.get(classSymbol);
    // fields initialized after constructors is initThusFar + (nonNullFields - constructorUninit)
    Sets.SetView<Element> initAfterConstructors =
        Sets.union(
            initThusFar,
            Sets.difference(entities.nonnullInstanceFields(), constructorUninitSymbols));
    builder.putAll(initMethod, initAfterConstructors);
  }
  if (entities.staticInitializerMethods().size() == 1) {
    MethodTree staticInitMethod = entities.staticInitializerMethods().iterator().next();
    // constructors aren't relevant here; just use initThusFar
    builder.putAll(staticInitMethod, initThusFar);
  }
  return builder.build();
}
 
Example 14
Source File: DefaultRegisteredServiceAccessStrategy.java    From springboot-shiro-cas-mybatis with MIT License
/**
 * {@inheritDoc}
 *
 * Verify presence of service required attributes.
 * <ul>
 *     <li>If no required attributes are specified, authz is granted.</li>
 *     <li>If ALL required attributes must be present, and the principal contains all and there is
 *     at least one attribute value that matches the required, authz is granted.</li>
 *     <li>If ALL required attributes don't have to be present, and there is at least
 *     one principal attribute present whose value matches the required, authz is granted.</li>
 *     <li>Otherwise, access is denied</li>
 * </ul>
 * Note that comparison of principal/required attributes is case-sensitive. Exact matches are required
 * for any individual attribute value.
 */
@Override
public boolean doPrincipalAttributesAllowServiceAccess(final Map<String, Object> principalAttributes) {
    if (this.requiredAttributes.isEmpty()) {
        logger.debug("No required attributes are specified");
        return true;
    }
    if (principalAttributes.isEmpty()) {
        logger.debug("No principal attributes are found to satisfy attribute requirements");
        return false;
    }

    if (principalAttributes.size() < this.requiredAttributes.size()) {
        logger.debug("The size of the principal attributes that are [{}] does not match requirements, "
                + "which means the principal is not carrying enough data to grant authorization",
                principalAttributes);
        return false;
    }

    final Map<String, Set<String>> requiredAttrs = this.getRequiredAttributes();
    logger.debug("These required attributes [{}] are examined against [{}] before service can proceed.",
            requiredAttrs, principalAttributes);

    final Sets.SetView<String> difference = Sets.intersection(requiredAttrs.keySet(), principalAttributes.keySet());
    final Set<String> copy = difference.immutableCopy();

    if (this.requireAllAttributes && copy.size() < this.requiredAttributes.size()) {
        logger.debug("Not all required attributes are available to the principal");
        return false;
    }

    for (final String key : copy) {
        final Set<?> requiredValues = this.requiredAttributes.get(key);
        final Set<?> availableValues;

        final Object objVal = principalAttributes.get(key);
        if (objVal instanceof Collection) {
            final Collection valCol = (Collection) objVal;
            availableValues = Sets.newHashSet(valCol.toArray());
        } else {
            availableValues = Collections.singleton(objVal);
        }

        final Sets.SetView<?> differenceInValues = Sets.intersection(availableValues, requiredValues);
        if (!differenceInValues.isEmpty()) {
            logger.info("Principal is authorized to access the service");
            return true;
        }
    }
    logger.info("Principal is denied access as the required attributes for the registered service are missing");
    return false;
}
 
Example 15
Source File: ProxyTaskGroupTests.java    From autorest-clientruntime-for-java with MIT License
@Test
public void testSampleTaskGroupSanity() {
    // Prepare sample group
    //
    /**
     *
     *   |------------------->B------------|
     *   |                                 |
     *   |                                 ↓
     *   F            ------->C----------->A
     *   |            |                    ^
     *   |            |                    |
     *   |------------>E                   |
     *                |                    |
     *                |                    |
     *                ------->D-------------
     */
    final List<String> groupItems = new ArrayList<>();
    TaskGroup group = createSampleTaskGroup("A", "B",
            "C", "D",
            "E", "F",
            groupItems);

    // Invocation of group should invoke all the tasks
    //
    group.invokeAsync(group.newInvocationContext())
            .subscribe(new Action1<Indexable>() {
                @Override
                public void call(Indexable value) {
                    StringIndexable stringIndexable = toStringIndexable(value);
                    Assert.assertTrue(groupItems.contains(stringIndexable.str()));
                    groupItems.remove(stringIndexable.str());
                }
            });

    Assert.assertEquals(0, groupItems.size());

    Map<String, Set<String>> shouldNotSee = new HashMap<>();

    // NotSeen entries for group-1
    shouldNotSee.put("A", new HashSet<String>());
    shouldNotSee.get("A").addAll(Arrays.asList(new String[] {"B", "C", "D", "E", "F"}));

    shouldNotSee.put("B", new HashSet<String>());
    shouldNotSee.get("B").addAll(Arrays.asList(new String[] {"F"}));

    shouldNotSee.put("C", new HashSet<String>());
    shouldNotSee.get("C").addAll(Arrays.asList(new String[] {"E", "F"}));

    shouldNotSee.put("D", new HashSet<String>());
    shouldNotSee.get("D").addAll(Arrays.asList(new String[] {"E", "F"}));

    shouldNotSee.put("E", new HashSet<String>());
    shouldNotSee.get("E").addAll(Arrays.asList(new String[] {"F"}));

    shouldNotSee.put("F", new HashSet<String>());
    shouldNotSee.get("F").addAll(Arrays.asList(new String[] {}));

    Set<String> seen = new HashSet<>();
    // Test invocation order for group
    //
    group.prepareForEnumeration();
    for (TaskGroupEntry<TaskItem> entry = group.getNext(); entry != null; entry = group.getNext()) {
        Sets.SetView<String> common = Sets.intersection(shouldNotSee.get(entry.key()), seen);
        if (common.size() > 0) {
            Assert.assertTrue("The entries " + common + " must be emitted before " + entry.key(), false);
        }
        seen.add(entry.key());
        group.reportCompletion(entry);
    }

    Assert.assertEquals(6, seen.size()); // 1 groups with 6 nodes
    Set<String> expectedToSee = new HashSet<>();
    expectedToSee.addAll(Arrays.asList(new String[]  {"A", "B", "C", "D", "E", "F"}));
    Sets.SetView<String> diff = Sets.difference(seen, expectedToSee);
    Assert.assertEquals(0, diff.size());
}
 
Example 16
Source File: HBaseTransactionPruningPlugin.java    From phoenix-tephra with Apache License 2.0
/**
 * Try to find the latest set of regions in which all regions have been major compacted, and
 * compute prune upper bound from them. Starting from newest to oldest, this looks into the
 * region set that has been saved periodically, and joins it with the prune upper bound data
 * for a region recorded after a major compaction.
 *
 * @param timeRegions the latest set of regions
 * @return prune upper bound
 * @throws IOException when not able to talk to HBase
 */
private long computePruneUpperBound(TimeRegions timeRegions) throws IOException {
  // Get the tables for the current time from the latest regions set
  final Set<TableName> existingTables = getTableNamesForRegions(timeRegions.getRegions());
  LOG.debug("Tables for time {} = {}", timeRegions.getTime(), existingTables);

  do {
    LOG.debug("Computing prune upper bound for {}", timeRegions);
    SortedSet<byte[]> transactionalRegions = timeRegions.getRegions();
    long time = timeRegions.getTime();

    long inactiveTransactionBound = dataJanitorState.getInactiveTransactionBoundForTime(time);
    LOG.debug("Got inactive transaction bound {}", inactiveTransactionBound);
    // If inactiveTransactionBound is not recorded then that means the data is not complete for these regions
    if (inactiveTransactionBound == -1) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Ignoring regions for time {} as no inactiveTransactionBound was found for that time, " +
                    "and hence the data must be incomplete", time);
      }
      continue;
    }

    // Remove non-existing tables from the transactional regions set, so that we don't look up prune upper bounds
    // for them. Since the deleted tables do not exist anymore, there is no need to make sure they have been
    // compacted. This ensures that transient tables do not block pruning progress.
    transactionalRegions = filterDeletedTableRegions(existingTables, transactionalRegions);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Transactional regions after removing the regions of non-existing tables = {}",
                Iterables.transform(transactionalRegions, TimeRegions.BYTE_ARR_TO_STRING_FN));
    }

    // Get the prune upper bounds for all the transactional regions
    Map<byte[], Long> pruneUpperBoundRegions =
      dataJanitorState.getPruneUpperBoundForRegions(transactionalRegions);
    logPruneUpperBoundRegions(pruneUpperBoundRegions);

    // Use inactiveTransactionBound as the prune upper bound for the empty regions since the regions that are
    // recorded as empty after inactiveTransactionBoundTime will not have invalid data
    // for transactions started on or before inactiveTransactionBoundTime
    pruneUpperBoundRegions = handleEmptyRegions(inactiveTransactionBound, transactionalRegions,
                                                pruneUpperBoundRegions);

    // If prune upper bounds are found for all the transactional regions, then compute the prune upper bound
    // across all regions
    if (!transactionalRegions.isEmpty() &&
      pruneUpperBoundRegions.size() == transactionalRegions.size()) {
      Long minPruneUpperBoundRegions = Collections.min(pruneUpperBoundRegions.values());
      long pruneUpperBound = Math.min(inactiveTransactionBound, minPruneUpperBoundRegions);
      LOG.debug("Found prune upper bound {} for time {}", pruneUpperBound, time);
      return pruneUpperBound;
    } else {
      if (LOG.isDebugEnabled()) {
        Sets.SetView<byte[]> difference =
          Sets.difference(transactionalRegions, pruneUpperBoundRegions.keySet());
        LOG.debug("Ignoring regions for time {} because the following regions did not record a pruneUpperBound: {}",
                  time, Iterables.transform(difference, TimeRegions.BYTE_ARR_TO_STRING_FN));
      }
    }

    timeRegions = dataJanitorState.getRegionsOnOrBeforeTime(time - 1);
  } while (timeRegions != null);
  return -1;
}
 
Example 17
Source File: BuildInfoRecorder.java    From buck with Apache License 2.0
public void assertOnlyHasKeys(String... keys) {
  Sets.SetView<String> difference =
      Sets.difference(buildMetadata.keySet(), ImmutableSet.copyOf(keys));
  Preconditions.checkState(
      difference.isEmpty(), "Contained extra keys: " + Joiner.on(":").join(difference));
}
 
Example 18
Source File: DefaultJavaLibrary.java    From buck with Apache License 2.0
protected DefaultJavaLibrary(
    BuildTarget buildTarget,
    ProjectFilesystem projectFilesystem,
    JarBuildStepsFactory jarBuildStepsFactory,
    SourcePathRuleFinder ruleFinder,
    Optional<SourcePath> proguardConfig,
    SortedSet<BuildRule> firstOrderPackageableDeps,
    ImmutableSortedSet<BuildRule> fullJarExportedDeps,
    ImmutableSortedSet<BuildRule> fullJarProvidedDeps,
    ImmutableSortedSet<BuildRule> fullJarExportedProvidedDeps,
    ImmutableSortedSet<BuildRule> runtimeDeps,
    @Nullable BuildTarget abiJar,
    @Nullable BuildTarget sourceOnlyAbiJar,
    Optional<String> mavenCoords,
    ImmutableSortedSet<BuildTarget> tests,
    boolean requiredForSourceOnlyAbi,
    UnusedDependenciesAction unusedDependenciesAction,
    Optional<UnusedDependenciesFinderFactory> unusedDependenciesFinderFactory,
    @Nullable CalculateSourceAbi sourceAbi,
    boolean isDesugarEnabled,
    boolean isInterfaceMethodsDesugarEnabled,
    boolean neverMarkAsUnusedDependency) {
  super(
      buildTarget,
      projectFilesystem,
      ruleFinder,
      new DefaultJavaLibraryBuildable(
          buildTarget,
          projectFilesystem,
          jarBuildStepsFactory,
          unusedDependenciesAction,
          unusedDependenciesFinderFactory,
          sourceAbi));
  this.ruleFinder = ruleFinder;
  this.sourcePathForOutputJar =
      Optional.ofNullable(
          jarBuildStepsFactory.getSourcePathToOutput(buildTarget, projectFilesystem));
  this.sourcePathForGeneratedAnnotationPath =
      Optional.ofNullable(
          jarBuildStepsFactory.getSourcePathToGeneratedAnnotationPath(
              buildTarget, projectFilesystem));

  this.sourceAbi = sourceAbi;
  this.isDesugarEnabled = isDesugarEnabled;
  this.isInterfaceMethodsDesugarEnabled = isInterfaceMethodsDesugarEnabled;

  // Exported deps are meant to be forwarded onto the CLASSPATH for dependents,
  // and so only make sense for java library types.
  validateExportedDepsType(buildTarget, fullJarExportedDeps);
  validateExportedDepsType(buildTarget, fullJarExportedProvidedDeps);

  Sets.SetView<BuildRule> missingExports =
      Sets.difference(fullJarExportedDeps, firstOrderPackageableDeps);
  // Exports should have been copied over to declared before invoking this constructor
  Preconditions.checkState(missingExports.isEmpty());

  this.proguardConfig = proguardConfig;
  this.firstOrderPackageableDeps = firstOrderPackageableDeps;
  this.fullJarExportedDeps = fullJarExportedDeps;
  this.fullJarProvidedDeps = fullJarProvidedDeps;
  this.fullJarExportedProvidedDeps = fullJarExportedProvidedDeps;
  this.runtimeDeps = runtimeDeps;
  this.mavenCoords = mavenCoords;
  this.tests = tests;
  this.requiredForSourceOnlyAbi = requiredForSourceOnlyAbi;

  this.javaAbiInfo =
      getSourcePathToOutput() == null
          ? new EmptyJavaAbiInfo(buildTarget)
          : new DefaultJavaAbiInfo(getSourcePathToOutput());
  this.abiJar = abiJar;
  this.sourceOnlyAbiJar = sourceOnlyAbiJar;
  this.neverMarkAsUnusedDependency = neverMarkAsUnusedDependency;

  this.outputClasspathEntriesSupplier =
      MoreSuppliers.memoize(
          () ->
              JavaLibraryClasspathProvider.getOutputClasspathJars(
                  DefaultJavaLibrary.this, sourcePathForOutputJar()));

  this.transitiveClasspathsSupplier =
      MoreSuppliers.memoize(
          () ->
              JavaLibraryClasspathProvider.getClasspathsFromLibraries(
                  getTransitiveClasspathDeps()));

  this.transitiveClasspathDepsSupplier =
      MoreSuppliers.memoize(
          () -> JavaLibraryClasspathProvider.getTransitiveClasspathDeps(DefaultJavaLibrary.this));

  this.buildOutputInitializer = new BuildOutputInitializer<>(buildTarget, this);
  this.javaClassHashesProvider =
      new DefaultJavaClassHashesProvider(
          ExplicitBuildTargetSourcePath.of(
              buildTarget, getBuildable().getPathToClassHashes(projectFilesystem)));
}