Java Code Examples for com.google.common.collect.Multimap#get()

The following examples show how to use com.google.common.collect.Multimap#get(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: ScanPrinterUtils.java    From disconf with Apache License 2.0 6 votes vote down vote up
/**
 * Prints the contents of the Reflections store map (for debugging).
 *
 * @param reflections the Reflections instance whose store is dumped to the log
 */
public static void printStoreMap(Reflections reflections) {

    LOGGER.info("Now we will print store map......");

    Store store = reflections.getStore();
    Map<String/* indexName */, Multimap<String, String>> storeMap = store.getStoreMap();
    // Iterate entries directly instead of keySet() + get(), avoiding a second
    // lookup per index name.
    for (Map.Entry<String, Multimap<String, String>> indexEntry : storeMap.entrySet()) {

        LOGGER.info("====================================");
        LOGGER.info("indexName:" + indexEntry.getKey());

        Multimap<String, String> multimap = indexEntry.getValue();

        for (String firstName : multimap.keySet()) {
            Collection<String> lastNames = multimap.get(firstName);
            LOGGER.info("\t\t" + firstName + ": " + lastNames);
        }

    }
}
 
Example 2
Source File: JsonConfigWriter.java    From endpoints-java with Apache License 2.0 6 votes vote down vote up
@Override
public Map<ApiKey, String> writeConfig(Iterable<? extends ApiConfig> configs)
    throws ApiConfigException {
  // Group the configs by the API they belong to.
  Multimap<ApiKey, ? extends ApiConfig> apisByKey =
      Multimaps.index(configs, config -> config.getApiKey());

  // This *must* retain the order of apisByKey so the lily_java_api BUILD rule has predictable
  // output order.
  Map<ApiKey, String> results = Maps.newLinkedHashMap();
  for (ApiKey apiKey : apisByKey.keySet()) {
    Collection<? extends ApiConfig> configsForApi = apisByKey.get(apiKey);
    validator.validate(configsForApi);
    results.put(apiKey, generateForApi(configsForApi));
  }
  return results;
}
 
Example 3
Source File: L2CacheRepositoryDecorator.java    From molgenis with GNU Lesser General Public License v3.0 6 votes vote down vote up
// Looks up the given ids, serving them from the L2 cache where safe.
// In a read-only transaction every entity comes from the cache; otherwise ids
// touched by the current transaction ("dirty") bypass the cache and are read
// from the decorated repository.
private List<Entity> findAllCache(List<Object> ids, Fetch fetch) {
  if (TransactionSynchronizationManager.isCurrentTransactionReadOnly()) {
    // Read-only transaction: nothing can be dirty, fetch everything from the cache.
    List<Entity> unorderedEntities = l2Cache.getBatch(delegate(), ids, fetch);
    // getBatch does not guarantee order; restore the order of the requested ids.
    return createOrderedEntities(ids, unorderedEntities);
  } else {
    String entityTypeId = getEntityType().getId();
    // Partition the ids: key true = dirty in this transaction, key false = clean.
    Multimap<Boolean, Object> partitionedIds =
        Multimaps.index(
            ids, id -> transactionInformation.isEntityDirty(EntityKey.create(entityTypeId, id)));
    Collection<Object> cleanIds = partitionedIds.get(false);
    Collection<Object> dirtyIds = partitionedIds.get(true);

    // Clean ids are served from the cache; dirty ids must hit the delegate.
    List<Entity> batch = l2Cache.getBatch(delegate(), cleanIds, fetch);
    Map<Object, Entity> result = newHashMap(uniqueIndex(batch, Entity::getIdValue));
    result.putAll(
        delegate().findAll(dirtyIds.stream(), fetch).collect(toMap(Entity::getIdValue, e -> e)));

    // Preserve the requested id order; ids not found anywhere are dropped.
    return ids.stream().filter(result::containsKey).map(result::get).collect(toList());
  }
}
 
Example 4
Source File: DiscoveryLogic.java    From datawave with Apache License 2.0 6 votes vote down vote up
/**
 * This attempts to normalize all of the {@code <value, field>} tuples with the corresponding {@code <field, normalizer>} tuple. The Normalization object
 * will determine whether or not a regex or literal is being normalized.
 *
 * See the {@link PatternNormalization} and {@link LiteralNormalization} implementations.
 *
 * @param normalization the normalization strategy (pattern or literal)
 * @param valuesToFields multimap of value to the fields it applies to
 * @param dataTypeMap multimap of field name to the data types registered for it
 * @return a multimap of normalized value to field; pairs whose normalization failed are omitted
 */
public static Multimap<String,String> normalize(Normalization normalization, Multimap<String,String> valuesToFields, Multimap<String,Type<?>> dataTypeMap) {
    Multimap<String,String> normalizedValuesToFields = HashMultimap.create();
    for (Entry<String,String> valueAndField : valuesToFields.entries()) {
        String value = valueAndField.getKey(), field = valueAndField.getValue();
        // Try every data type registered for the field; each successful
        // normalization contributes one <normalized, field> pair.
        for (Type<?> dataType : dataTypeMap.get(field)) {
            try {
                log.debug("Attempting to normalize [" + value + "] with [" + dataType.getClass() + "]");
                String normalized = normalization.normalize(dataType, field, value);
                normalizedValuesToFields.put(normalized, field);
                log.debug("Normalization succeeded!");
            } catch (Exception exception) {
                // Best-effort: a failed normalization just skips this pair, but
                // keep the cause in the log for troubleshooting.
                log.debug("Normalization failed.", exception);
            }
        }
    }
    return normalizedValuesToFields;
}
 
Example 5
Source File: TraceFormatter.java    From yql-plus with Apache License 2.0 6 votes vote down vote up
/**
 * Recursively prints a trace tree: each entry is printed with its timings,
 * followed by its log lines, then its children one indent level deeper.
 *
 * @param out the output sink, which tracks the current indentation level
 * @param childmap maps a trace entry id to its child trace entries
 * @param logmap maps a trace entry id to the log entries recorded under it
 * @param traceEntries the entries to print at the current level
 */
private static void dumpTree(CodeOutput out, Multimap<Integer, TraceEntry> childmap, Multimap<Integer, TraceLogEntry> logmap, Collection<TraceEntry> traceEntries) {
    for (TraceEntry entry : traceEntries) {
        out.println(String.format("%s : %s (%.2f - %.2f) (%.2f)", entry.getGroup(), entry.getName(), entry.getStartMilliseconds(), entry.getEndMilliseconds(), entry.getDurationMilliseconds()));
        out.indent();
        // Print the log entries recorded for this trace entry.
        for (TraceLogEntry log : logmap.get(entry.getId())) {
            out.println("%6d [%s]: %s", TimeUnit.NANOSECONDS.toMicros(log.getTicks()), log.getLevel(), log);
            if (log.get() instanceof ThrowableEvent) {
                // Throwable payloads get their message and stack trace indented below.
                ThrowableEvent event = (ThrowableEvent) log.get();
                out.indent();
                out.println("Exception: %s", event.message);
                out.indent();
                CodeFormatter.writeException(event.throwable, out);
                out.dedent();
                out.dedent();
            }
        }
        // Recurse into the children before restoring this entry's indent level.
        dumpTree(out, childmap, logmap, childmap.get(entry.getId()));
        out.dedent();
    }
}
 
Example 6
Source File: GCRatioSupplier.java    From hmftools with GNU General Public License v3.0 6 votes vote down vote up
/**
 * Builds GC-ratio normalizations for reference and tumor read counts.
 * Every count position that falls inside a known GC profile region is fed to
 * both the reference and tumor builders; positions with no matching GC profile
 * are skipped.
 *
 * @param gcProfiles GC profile regions keyed by chromosome
 * @param counts cobalt read counts keyed by chromosome
 */
GCRatioSupplier(@NotNull final Multimap<Chromosome, GCProfile> gcProfiles, @NotNull final Multimap<Chromosome, CobaltCount> counts) {
    final GenomeRegionSelector<GCProfile> gcProfileSelector = GenomeRegionSelectorFactory.createImproved(gcProfiles);

    final GCRatioNormalization tumorRatiosBuilder = new GCRatioNormalization();
    final GCRatioNormalization referenceRatiosBuilder = new GCRatioNormalization();

    for (Chromosome chromosome : counts.keySet()) {
        for (CobaltCount cobaltPosition : counts.get(chromosome)) {
            // Only positions covered by a GC profile region contribute.
            final Optional<GCProfile> optionalGCProfile = gcProfileSelector.select(cobaltPosition);
            if (optionalGCProfile.isPresent()) {
                final GCProfile gcProfile = optionalGCProfile.get();
                referenceRatiosBuilder.addPosition(chromosome, gcProfile, cobaltPosition.referenceReadCount());
                tumorRatiosBuilder.addPosition(chromosome, gcProfile, cobaltPosition.tumorReadCount());
            }
        }
    }

    // Median read counts are derived first, then used to normalize the ratios.
    referenceGCMedianReadCount = referenceRatiosBuilder.gcMedianReadCount();
    referenceRatios = referenceRatiosBuilder.build(referenceGCMedianReadCount);

    tumorGCMedianReadCount = tumorRatiosBuilder.gcMedianReadCount();
    tumorRatios = tumorRatiosBuilder.build(tumorGCMedianReadCount);
}
 
Example 7
Source File: CheckCfgJavaValidator.java    From dsl-devkit with Eclipse Public License 1.0 6 votes vote down vote up
/**
 * Returns all elements whose key (as computed by the given function) occurs more
 * than once. Only elements accepted by the guard predicate are considered.
 *
 * @param <T>
 *          the generic type
 * @param predicate
 *          the predicate acting as a guard
 * @param function
 *          returns a string for an instance of type T
 * @param elements
 *          the elements to be checked
 * @return the duplicates
 */
private <T extends EObject> Iterable<T> getDuplicates(final Predicate<T> predicate, final Function<T, String> function, final Iterable<T> elements) {
  // Bucket the accepted elements by their computed key.
  Multimap<String, T> candidatesByKey = Multimaps.newMultimap(Maps.<String, Collection<T>> newHashMap(), new Supplier<Collection<T>>() {
    @Override
    public Collection<T> get() {
      return Lists.<T> newArrayList();
    }
  });
  for (final T element : elements) {
    if (predicate.apply(element)) {
      candidatesByKey.put(function.apply(element), element);
    }
  }

  // Any bucket with more than one element contributes all of its members.
  List<T> duplicates = Lists.newArrayList();
  for (Collection<T> group : candidatesByKey.asMap().values()) {
    if (group.size() > 1) {
      duplicates.addAll(group);
    }
  }
  return duplicates;
}
 
Example 8
Source File: SaltStatesTest.java    From cloudbreak with Apache License 2.0 6 votes vote down vote up
// Verifies that SaltStates.jidInfo parses a HIGH-state jobs.lookup_jid response
// into a host -> error-messages multimap.
@Test
public void jidInfoHighTest() throws Exception {
    String jobId = "2";

    // Stub the salt connector to return a canned jobs.lookup_jid response.
    InputStream responseStream = SaltStatesTest.class.getResourceAsStream("/jid_response.json");
    String response = IOUtils.toString(responseStream);
    responseStream.close();
    Map<?, ?> responseMap = new ObjectMapper().readValue(response, Map.class);
    when(saltConnector.run(eq("jobs.lookup_jid"), any(), any(), eq("jid"), any())).thenReturn(responseMap);

    Multimap<String, String> jidInfo = SaltStates.jidInfo(saltConnector, jobId, target, StateType.HIGH);
    verify(saltConnector, times(1)).run("jobs.lookup_jid", RUNNER, Map.class, "jid", jobId);

    // The canned response contains a single host with four error entries.
    assertThat(jidInfo.keySet(), hasSize(1));
    assertThat(jidInfo.entries(), hasSize(4));
    String hostName = jidInfo.keySet().iterator().next();
    Collection<String> hostErrors = jidInfo.get(hostName);

    // The order of the parsed errors is not guaranteed, so match in any order.
    assertThat(hostErrors, containsInAnyOrder(
            "\nName: /opt/ambari-server/ambari-server-init.sh\nComment: Source file salt://ambari/scripts/ambari-server-initttt.sh not found",
            "\nName: ambari-server\nComment: Service ambari-server is already enabled, and is dead",
            "\nComment: Command \"/opt/ambari-server/install-mpack-1.sh\" run\nStderr: + ARGS= + echo yes + ambari-server install-mpack --",
            "\nName: haveged\nComment: Package haveged is already installed."));
}
 
Example 9
Source File: SysDeptServiceImpl.java    From momo-cloud-permission with Apache License 2.0 6 votes vote down vote up
/**
 * Recursively assembles the department tree, one level at a time: for every
 * node on the current level, look up its children by level key, sort them, and
 * attach them before recursing.
 *
 * @param deptLevelList the departments on the current level
 * @param level the level key of the current level
 * @param levelDeptMap all departments, grouped by their level key
 */
private void transformDeptTree(List<DepTreeRes> deptLevelList, String level, Multimap<String, DepTreeRes> levelDeptMap) {
    for (DepTreeRes deptLevelDto : deptLevelList) {
        // Visit each element on this level.
        // Compute the level key of this node's children.
        String nextLevel = LevelUtil.calculateLevel(level, deptLevelDto.getId());
        // Fetch the children registered under the next level.
        // NOTE(review): the cast assumes levelDeptMap.get() returns a List
        // (i.e. a ListMultimap such as ArrayListMultimap) — confirm at the call site.
        List<DepTreeRes> tempDeptList = (List<DepTreeRes>) levelDeptMap.get(nextLevel);
        if (!CollectionUtils.isEmpty(tempDeptList)) {
            // Sort the children.
            tempDeptList.sort(deptSeqComparator);
            // Attach the sorted children to the current node.
            deptLevelDto.setDepTreeResList(tempDeptList);
            // Recurse into the next level.
            transformDeptTree(tempDeptList, nextLevel, levelDeptMap);
        }
    }
}
 
Example 10
Source File: AbstractFunctionalQuery.java    From datawave with Apache License 2.0 5 votes vote down vote up
/**
 * Writes the supplied metadata entries to the metadata table and forces a full
 * compaction so they are immediately visible.
 *
 * @param metadataEntries map of field name to the keys to write for that field
 * @throws AccumuloSecurityException if the user lacks permission
 * @throws AccumuloException on a general Accumulo failure
 * @throws TableNotFoundException if the metadata table does not exist
 */
protected void addMetadataEntries(Multimap<String,Key> metadataEntries) throws AccumuloSecurityException, AccumuloException, TableNotFoundException {
    MultiTableBatchWriter multiTableWriter = connector.createMultiTableBatchWriter(new BatchWriterConfig());
    try {
        BatchWriter writer = multiTableWriter.getBatchWriter(QueryTestTableHelper.METADATA_TABLE_NAME);
        for (String field : metadataEntries.keySet()) {
            Mutation mutation = new Mutation(new Text(field));
            // Note: the original code re-put each key back into metadataEntries
            // while iterating its values — redundant and a ConcurrentModificationException
            // risk — so that line has been removed.
            for (Key key : metadataEntries.get(field)) {
                mutation.put(key.getColumnFamily(), key.getColumnQualifier(), key.getColumnVisibilityParsed(), new Value());
            }
            writer.addMutation(mutation);
        }
    } finally {
        // Closing the multi-table writer flushes and closes all of its batch writers,
        // even if a mutation failed above.
        multiTableWriter.close();
    }
    connector.tableOperations().compact(QueryTestTableHelper.METADATA_TABLE_NAME, new Text("\0"), new Text("~"), true, true);
}
 
Example 11
Source File: ExecutionPlanner.java    From samza with Apache License 2.0 5 votes vote down vote up
/**
 * Groups streams participating in joins together.
 *
 * @param jobGraph the job graph whose input operators and tables are examined
 * @return an unmodifiable list of stream sets, one per join
 */
private static List<StreamSet> groupJoinedStreams(JobGraph jobGraph) {
  // Group input operator specs (input/intermediate streams) by the joins they participate in.
  Multimap<OperatorSpec, InputOperatorSpec> joinOpSpecToInputOpSpecs =
      OperatorSpecGraphAnalyzer.getJoinToInputOperatorSpecs(
          jobGraph.getApplicationDescriptorImpl().getInputOperators().values());

  // Index the job's table descriptors by table id for the stream-table case below.
  Map<String, TableDescriptor> tableDescriptors = jobGraph.getTables().stream()
      .collect(Collectors.toMap(TableDescriptor::getTableId, Function.identity()));

  // Convert every group of input operator specs into a group of corresponding stream edges.
  List<StreamSet> streamSets = new ArrayList<>();
  for (OperatorSpec joinOpSpec : joinOpSpecToInputOpSpecs.keySet()) {
    Collection<InputOperatorSpec> joinedInputOpSpecs = joinOpSpecToInputOpSpecs.get(joinOpSpec);
    StreamSet streamSet = getStreamSet(joinOpSpec.getOpId(), joinedInputOpSpecs, jobGraph);

    // If current join is a stream-table join, add the stream edges corresponding to side-input
    // streams associated with the joined table (if any).
    if (joinOpSpec instanceof StreamTableJoinOperatorSpec) {
      StreamTableJoinOperatorSpec streamTableJoinOperatorSpec = (StreamTableJoinOperatorSpec) joinOpSpec;
      TableDescriptor tableDescriptor = tableDescriptors.get(streamTableJoinOperatorSpec.getTableId());
      if (tableDescriptor instanceof LocalTableDescriptor) {
        // Only local tables can have side-input streams.
        LocalTableDescriptor localTableDescriptor = (LocalTableDescriptor) tableDescriptor;
        Collection<String> sideInputs = ListUtils.emptyIfNull(localTableDescriptor.getSideInputs());
        Iterable<StreamEdge> sideInputStreams = sideInputs.stream().map(jobGraph::getStreamEdge)::iterator;
        Iterable<StreamEdge> streams = streamSet.getStreamEdges();
        streamSet = new StreamSet(streamSet.getSetId(), Iterables.concat(streams, sideInputStreams));
      }
    }

    streamSets.add(streamSet);
  }

  return Collections.unmodifiableList(streamSets);
}
 
Example 12
Source File: ASMHelper.java    From NOVA-Core with GNU Lesser General Public License v3.0 5 votes vote down vote up
/**
 * Injects instruction lists into methods of the named class.
 *
 * @param name the class name used to look up injectors
 * @param bytes the class bytecode to transform
 * @param injectors map of class name to the method injectors to apply
 * @return the transformed bytecode, or the original bytes unchanged if no injector matches
 * @throws RuntimeException if a target method or its needle instructions cannot be found
 */
public static byte[] injectMethods(String name, byte[] bytes, Multimap<String, MethodInjector> injectors) {
	if (injectors.containsKey(name)) {
		ClassNode cnode = createClassNode(bytes);

		for (MethodInjector injector : injectors.get(name)) {
			MethodNode method = findMethod(injector.method, cnode);
			if (method == null) {
				throw new RuntimeException("Method not found: " + injector.method);
			}
			Game.logger().info("Injecting into {}\n{}", injector.method, printInsnList(injector.injection));

			// Locate the "needle" instruction sequence; injection happens at its
			// start or end depending on the injector configuration.
			List<AbstractInsnNode> callNodes;
			if (injector.before) {
				callNodes = InstructionComparator.insnListFindStart(method.instructions, injector.needle);
			} else {
				callNodes = InstructionComparator.insnListFindEnd(method.instructions, injector.needle);
			}

			if (callNodes.isEmpty()) {
				throw new RuntimeException("Needle not found in Haystack: " + injector.method + "\n" + printInsnList(injector.needle));
			}

			// Clone the injection list per insertion point so instruction nodes
			// are never shared between two insertions.
			for (AbstractInsnNode node : callNodes) {
				if (injector.before) {
					Game.logger().info("Injected before: {}", printInsn(node));
					method.instructions.insertBefore(node, cloneInsnList(injector.injection));
				} else {
					Game.logger().info("Injected after: {}", printInsn(node));
					method.instructions.insert(node, cloneInsnList(injector.injection));
				}
			}
		}

		bytes = createBytes(cnode, ClassWriter.COMPUTE_FRAMES);
	}
	return bytes;
}
 
Example 13
Source File: APICallClustererMAPO.java    From api-mining with GNU General Public License v3.0 5 votes vote down vote up
/**
 * Clusters API call sequences from an ARFF dataset and prints the cluster
 * assignments.
 *
 * @param args optional: args[0] overrides the default dataset path
 */
public static void main(final String[] args) throws Exception {

    // Allow the dataset to be supplied on the command line; fall back to the
    // original hard-coded example dataset for backward compatibility.
    final String dataset = args.length > 0 ? args[0]
            : "/afs/inf.ed.ac.uk/user/j/jfowkes/Code/Sequences/Datasets/API/srclibs/calls/netty.arff";
    final Multimap<Integer, String> assignments = clusterAPICallSeqs(dataset, 10);

    // Print assignments, grouped by cluster.
    for (final int cluster : assignments.keySet()) {
        System.out.println("===== Cluster #" + cluster);
        for (final String seq : assignments.get(cluster))
            System.out.println("      " + seq);
    }
}
 
Example 14
Source File: BundleUpgradeParser.java    From brooklyn-server with Apache License 2.0 5 votes vote down vote up
/**
 * Parses the manifest header declaring which registered types this bundle
 * upgrades, returning a multimap keyed by target type.
 *
 * NOTE(review): semantics below are inferred from the visible code — confirm
 * against the surrounding BundleUpgradeParser documentation.
 */
private static Multimap<VersionedName, VersionRangedName> parseUpgradeForTypesHeader(String input, Bundle bundle, Supplier<? extends Iterable<? extends RegisteredType>> typeSupplier, Multimap<VersionedName, VersionRangedName> upgradesForBundles) {
    // Collect the version ranges under which this bundle's own symbolic name is
    // upgraded, taken from the already-parsed bundle-level upgrade declarations.
    List<String> sourceVersions = null;
    if (upgradesForBundles!=null) {
        Collection<VersionRangedName> acceptableRanges = upgradesForBundles.get(new VersionedName(bundle));
        if (acceptableRanges!=null && !acceptableRanges.isEmpty()) {
            for (VersionRangedName n: acceptableRanges) {
                if (n.getSymbolicName().equals(bundle.getSymbolicName())) {
                    if (sourceVersions==null) {
                        sourceVersions = MutableList.of();
                    }
                    sourceVersions.add(n.getOsgiVersionRange().toString());
                }
            }
        }
    }
    // All type names supplied by this bundle, normalized to OSGi versioned names.
    Set<VersionedName> typeSupplierNames = MutableList.copyOf(typeSupplier.get()).stream().map(
        (t) -> VersionedName.toOsgiVersionedName(t.getVersionedName())).collect(Collectors.toSet());
    // No explicit header but the bundle itself is upgraded: default to all types.
    if (input==null && sourceVersions!=null && !sourceVersions.isEmpty()) {
        input = "*";
    }
    return parseVersionRangedNameEqualsVersionedNameList(input, false,
        // wildcard means all types, all versions of this bundle this bundle replaces
        getTypeNamesInBundle(typeSupplier), sourceVersions,
        // default target is same type at version of this bundle
        (i) -> { 
            VersionedName targetTypeAtBundleVersion = new VersionedName(i.getSymbolicName(), bundle.getVersion());
            if (!typeSupplierNames.contains(VersionedName.toOsgiVersionedName(targetTypeAtBundleVersion))) {
                throw new IllegalStateException("Bundle manifest declares it upgrades "+i+" "
                    + "but does not declare an explicit target and does not contain inferred target "+targetTypeAtBundleVersion);
            }
            return targetTypeAtBundleVersion; 
        });
}
 
Example 15
Source File: GetAccountHierarchy.java    From google-ads-java with Apache License 2.0 5 votes vote down vote up
/**
 * Prints the specified account's hierarchy using recursion.
 *
 * @param customerClient the customer client whose info will be printed and its child accounts
 *     will be processed if it's a manager.
 * @param customerIdsToChildAccounts a map containing the account hierarchy information.
 * @param depth the current depth we are printing from in the account hierarchy.
 */
private void printAccountHierarchy(
    CustomerClient customerClient,
    Multimap<Long, CustomerClient> customerIdsToChildAccounts,
    int depth) {
  String leadingSpace = " ";
  if (depth == 0) {
    // Fixed: the header string was missing its closing parenthesis.
    System.out.println("Customer ID (Descriptive Name, Currency Code, Time Zone)");
    leadingSpace = "";
  } else {
    System.out.println("|");
  }
  // Indent two dashes per level of depth.
  System.out.print(Strings.repeat("-", depth * 2));
  long customerId = customerClient.getId().getValue();
  System.out.printf(
      leadingSpace + "%d ('%s', '%s', '%s')%n",
      customerId,
      customerClient.getDescriptiveName().getValue(),
      customerClient.getCurrencyCode().getValue(),
      customerClient.getTimeZone().getValue());

  // Recursively calls this function for all child accounts of customerClient if the current
  // customer is a manager account.
  for (CustomerClient childCustomer : customerIdsToChildAccounts.get(customerId)) {
    printAccountHierarchy(childCustomer, customerIdsToChildAccounts, depth + 1);
  }
}
 
Example 16
Source File: EnhanceMojo.java    From uima-uimafit with Apache License 2.0 5 votes vote down vote up
/**
 * Write a report on any meta data missing from components.
 *
 * @param aReportFile the file to write the report to
 * @param aReportData per-class report messages, keyed by class name
 * @throws MojoExecutionException if the report cannot be written
 */
private void writeMissingMetaDataReport(File aReportFile, Multimap<String, String> aReportData)
        throws MojoExecutionException {
  // Sort the class names so the report is deterministic.
  String[] classes = aReportData.keySet().toArray(new String[aReportData.keySet().size()]);
  Arrays.sort(classes);

  FileUtils.mkdir(aReportFile.getParent());
  // try-with-resources guarantees the writer is closed on every path.
  try (PrintWriter out =
          new PrintWriter(new OutputStreamWriter(new FileOutputStream(aReportFile), encoding))) {
    if (classes.length > 0) {
      for (String clazz : classes) {
        out.printf("%s %s%n", MARK_CLASS, clazz);
        Collection<String> messages = aReportData.get(clazz);
        if (messages.isEmpty()) {
          // Fixed: was missing the %n line break.
          out.printf("  No problems%n");
        } else {
          for (String message : messages) {
            out.printf("  %s%n", message);
          }
        }
        out.printf("%n");
      }
    } else {
      out.printf("%s%n", MARK_NO_MISSING_META_DATA);
    }
  } catch (IOException e) {
    throw new MojoExecutionException("Unable to write missing meta data report to ["
            + aReportFile + "]" + ExceptionUtils.getRootCauseMessage(e), e);
  }
}
 
Example 17
Source File: ProtobufEdgeDataTypeHandler.java    From datawave with Apache License 2.0 4 votes vote down vote up
/**
 * Builds an EdgeDataBundle for the given edge definition and source/sink field
 * values, populating attributes, duration, enrichment, masking and delete-mode
 * before validating the result.
 *
 * @return the populated bundle, or null if the edge is invalid or blacklisted
 */
protected EdgeDataBundle createEdge(EdgeDefinition edgeDef, RawRecordContainer event, NormalizedContentInterface ifaceSource, String sourceGroup,
                String sourceSubGroup, NormalizedContentInterface ifaceSink, String sinkGroup, String sinkSubGroup, String edgeAttribute2,
                String edgeAttribute3, Multimap<String,NormalizedContentInterface> normalizedFields,
                Map<String,Multimap<String,NormalizedContentInterface>> depthFirstList, String loadDate, long activityDate, boolean validActivityDate) {
    // Get the edge value
    EdgeDataBundle edgeDataBundle = new EdgeDataBundle(edgeDef, ifaceSource, ifaceSink, event, this.getHelper(event.getDataType()));
    
    if (edgeAttribute2 != null) {
        edgeDataBundle.setEdgeAttribute2(edgeAttribute2);
    }
    if (edgeAttribute3 != null) {
        edgeDataBundle.setEdgeAttribute3(edgeAttribute3);
    }
    
    edgeDataBundle.setLoadDate(loadDate);
    edgeDataBundle.setActivityDate(activityDate);
    edgeDataBundle.setValidActivityDate(validActivityDate);
    
    // setup duration: up/down duration needs both endpoints, elapsed needs one
    if (edgeDef.hasDuration()) {
        if (edgeDef.getUDDuration()) {
            NormalizedContentInterface upnci = getNullKeyedNCI(edgeDef.getUpTime(), normalizedFields);
            NormalizedContentInterface downnci = getNullKeyedNCI(edgeDef.getDownTime(), normalizedFields);
            if (null != upnci && null != downnci) {
                edgeDataBundle.initDuration(upnci, downnci);
            }
        } else {
            NormalizedContentInterface elnci = getNullKeyedNCI(edgeDef.getElapsedTime(), normalizedFields);
            if (null != elnci) {
                edgeDataBundle.initDuration(elnci);
            }
        }
    }
    
    String typeName = event.getDataType().typeName();
    
    // if the edgeDef is an enrichment definition, fill in the enrichedValue
    if (edgeDef.isEnrichmentEdge()) {
        if (edgeTypeLookup.containsKey(typeName)) {
            // if the group is the same as the sink or source,
            // then ensure we use the correct subgroup, otherwise we will enrich with the first value found
            Collection<NormalizedContentInterface> ifaceEnrichs = normalizedFields.get(edgeDef.getEnrichmentField());
            if (null != ifaceEnrichs && !ifaceEnrichs.isEmpty()) {
                String enrichGroup = getGroup(edgeDef.getEnrichmentField());
                // Fixed: compare Strings with equals() (was a reference comparison: enrichGroup != NO_GROUP)
                if (!NO_GROUP.equals(enrichGroup)) {
                    if (enrichGroup.equals(sourceGroup)) {
                        ifaceEnrichs = depthFirstList.get(edgeDef.getEnrichmentField()).get(sourceSubGroup);
                    } else if (enrichGroup.equals(sinkGroup)) {
                        ifaceEnrichs = depthFirstList.get(edgeDef.getEnrichmentField()).get(sinkSubGroup);
                    }
                }
                if (null != ifaceEnrichs && !ifaceEnrichs.isEmpty()) {
                    if (ifaceEnrichs.size() > 1) {
                        log.warn("The enrichment field for " + edgeDef.getEnrichmentField() + " contains multiple values...choosing first valid entry");
                        
                    }
                    for (NormalizedContentInterface ifaceEnrich : ifaceEnrichs) {
                        // the value of the enrichment field is a edge type lookup
                        String enrichedIndex = ifaceEnrich.getIndexedFieldValue();
                        // if we know this enrichment mode, then use it
                        if (edgeTypeLookup.get(typeName).containsKey(enrichedIndex)) {
                            edgeDataBundle.setEnrichedIndex(enrichedIndex); // required for eventMetadataRegistry
                            edgeDataBundle.setEnrichedValue(edgeTypeLookup.get(typeName).get(enrichedIndex));
                            break;
                        }
                    }
                }
                
            }
        } else {
            log.error("Cannot enrich edge because Enrichment Edge Types are not defined for data type: " + typeName);
            
        }
    }
    
    if (event.isRequiresMasking()) {
        maskEdge(edgeDataBundle, event);
    }
    // Is this an edge to delete?
    edgeDataBundle.setIsDeleting(this.getHelper(event.getDataType()).getDeleteMode());
    
    // Validate the edge value
    if (!edgeDataBundle.isValid()) {
        return null;
    }
    
    // Set edge type if this is an enrichment edge
    if (edgeDef.isEnrichmentEdge()) {
        if (null != edgeDataBundle.getEnrichedValue()) {
            edgeDataBundle.setEdgeType(edgeDataBundle.getEnrichedValue());
        }
    }
    
    // check value blacklist; only applies when blacklisting is enabled.
    // Fixed operator precedence: the original `a && b || c` evaluated the sink
    // blacklist check even when enableBlacklist was false.
    if (this.enableBlacklist
                    && (isBlacklistValue(typeName, edgeDataBundle.getSource().getValue(ValueType.INDEXED))
                                    || isBlacklistValue(typeName, edgeDataBundle.getSink().getValue(ValueType.INDEXED)))) {
        return null;
    }
    
    return edgeDataBundle;
}
 
Example 18
Source File: ResponseMapping.java    From tac-kbp-eal with MIT License 4 votes vote down vote up
/**
 * Applies this mapping (response replacements and deletions) to a response
 * linking, producing a new linking with remapped response sets, remapped
 * incomplete responses, and (when present) remapped response-set IDs.
 *
 * @param responseLinking the linking to transform
 * @return a new ResponseLinking with the mapping applied
 */
public ResponseLinking apply(ResponseLinking responseLinking) {
  final Function<Response, Response> responseMapping = MapUtils.asFunction(replacedResponses,
      Functions.<Response>identity());
  final Predicate<Response> notDeleted = not(in(deletedResponses));

  // Remap every response set, dropping deleted responses; empty sets vanish.
  final ImmutableSet.Builder<ResponseSet> newResponseSetsB = ImmutableSet.builder();
  final ImmutableMap.Builder<ResponseSet, ResponseSet> oldResponseSetToNewB = ImmutableMap.builder();
  for (final ResponseSet responseSet : responseLinking.responseSets()) {
    final ImmutableSet<Response> filteredResponses = FluentIterable.from(responseSet)
        .filter(notDeleted)
        .transform(responseMapping).toSet();
    if (!filteredResponses.isEmpty()) {
      final ResponseSet derived = ResponseSet.from(filteredResponses);
      newResponseSetsB.add(derived);
      oldResponseSetToNewB.put(responseSet, derived);
    }
  }

  // Incomplete responses: drop deleted, remap, and exclude anything that is
  // now covered by a (remapped) response set.
  final ImmutableSet<ResponseSet> newResponseSets = newResponseSetsB.build();
  final Predicate<Response> notLinked = not(in(ImmutableSet.copyOf(
      Iterables.concat(newResponseSets))));
  final ImmutableSet<Response> newIncompletes =
      FluentIterable.from(responseLinking.incompleteResponses())
          .filter(notDeleted)
          .transform(responseMapping)
          .filter(notLinked)
          .toSet();

  final Optional<ImmutableBiMap<String, ResponseSet>> newResponseSetIDMap;
  if (responseLinking.responseSetIds().isPresent()) {
    final BiMap<String, ResponseSet> responseSetIDs = responseLinking.responseSetIds().get();
    final ImmutableMap<ResponseSet, ResponseSet> oldResponseSetToNew = oldResponseSetToNewB.build();
    final Multimap<ResponseSet, ResponseSet> newResponseToOld = oldResponseSetToNew.asMultimap().inverse();
    final ImmutableBiMap.Builder<String, ResponseSet> newResponseSetIDMapB =
        ImmutableBiMap.builder();
    // since response sets may lose responses and no longer be unique, we choose the earliest alphabetical ID
    for(final ResponseSet nu: newResponseToOld.keySet()) {
      final ImmutableSet.Builder<String> candidateIDsB = ImmutableSet.builder();
      for(final ResponseSet old: newResponseToOld.get(nu)) {
        candidateIDsB.add(responseSetIDs.inverse().get(old));
      }
      final ImmutableSet<String> candidateIDs = candidateIDsB.build();
      final String newID = Ordering.natural().sortedCopy(candidateIDs).get(0);
      if(candidateIDs.size() > 1) {
        // only log if we're converting multiple sets.
        log.debug("Collapsing response sets {} to {}", candidateIDs, newID);
      }
      newResponseSetIDMapB.put(newID, nu);
    }
    newResponseSetIDMap = Optional.of(newResponseSetIDMapB.build());
  } else {
    newResponseSetIDMap = Optional.absent();
  }

  return ResponseLinking.builder().docID(responseLinking.docID())
      .responseSets(newResponseSets).incompleteResponses(newIncompletes)
      .responseSetIds(newResponseSetIDMap).build();
}
 
Example 19
Source File: CompilationErrorsTest.java    From RetroFacebook with Apache License 2.0 4 votes vote down vote up
/**
 * Compiles the given test sources with the RetroFacebook annotation processor
 * and asserts that the resulting diagnostics match the expected kinds and
 * message patterns.
 *
 * @param expectedDiagnostics expected diagnostic kinds mapped to message patterns
 * @param testSourceCode source text of the classes to compile, one entry per class
 * @throws IOException if a source file cannot be written
 */
private void assertCompilationResultIs(
    Multimap<Diagnostic.Kind, Pattern> expectedDiagnostics,
    List<String> testSourceCode) throws IOException {
  assertFalse(testSourceCode.isEmpty());

  StringWriter compilerOut = new StringWriter();

  List<String> options = ImmutableList.of(
      "-sourcepath", tmpDir.getPath(),
      "-d", tmpDir.getPath(),
      "-processor", RetroFacebookProcessor.class.getName(),
      "-Xlint");
  javac.getTask(compilerOut, fileManager, diagnosticCollector, options, null, null);
  // This doesn't compile anything but communicates the paths to the JavaFileManager.

  // Convert the strings containing the source code of the test classes into files that we
  // can feed to the compiler.
  List<String> classNames = Lists.newArrayList();
  List<JavaFileObject> sourceFiles = Lists.newArrayList();
  for (String source : testSourceCode) {
    ClassName className = ClassName.extractFromSource(source);
    File dir = new File(tmpDir, className.sourceDirectoryName());
    dir.mkdirs();
    assertTrue(dir.isDirectory());  // True if we just made it, or it was already there.
    String sourceName = className.simpleName + ".java";
    Files.write(source, new File(dir, sourceName), Charsets.UTF_8);
    classNames.add(className.fullName());
    JavaFileObject sourceFile = fileManager.getJavaFileForInput(
        StandardLocation.SOURCE_PATH, className.fullName(), Kind.SOURCE);
    sourceFiles.add(sourceFile);
  }
  assertEquals(classNames.size(), sourceFiles.size());

  // Compile the classes.
  JavaCompiler.CompilationTask javacTask = javac.getTask(
      compilerOut, fileManager, diagnosticCollector, options, classNames, sourceFiles);
  boolean compiledOk = javacTask.call();

  // Check that there were no compilation errors unless we were expecting there to be.
  // We ignore "notes", typically debugging output from the annotation processor
  // when that is enabled.
  Multimap<Diagnostic.Kind, String> diagnostics = ArrayListMultimap.create();
  for (Diagnostic<?> diagnostic : diagnosticCollector.getDiagnostics()) {
    boolean ignore = (diagnostic.getKind() == Diagnostic.Kind.NOTE
        || (diagnostic.getKind() == Diagnostic.Kind.WARNING
            && diagnostic.getMessage(null).contains(
                "No processor claimed any of these annotations")));
    if (!ignore) {
      diagnostics.put(diagnostic.getKind(), diagnostic.getMessage(null));
    }
  }
  // Compilation must fail if and only if errors were collected.
  assertEquals(diagnostics.containsKey(Diagnostic.Kind.ERROR), !compiledOk);
  assertEquals("Diagnostic kinds should match: " + diagnostics,
      expectedDiagnostics.keySet(), diagnostics.keySet());
  // Every expected pattern must match at least one actual diagnostic of its kind.
  for (Map.Entry<Diagnostic.Kind, Pattern> expectedDiagnostic : expectedDiagnostics.entries()) {
    Collection<String> actualDiagnostics = diagnostics.get(expectedDiagnostic.getKey());
    assertTrue("Diagnostics should contain " + expectedDiagnostic + ": " + diagnostics,
        Iterables.any(actualDiagnostics, Predicates.contains(expectedDiagnostic.getValue())));
  }
}
 
Example 20
Source File: AggregatingReducer.java    From datawave with Apache License 2.0 3 votes vote down vote up
/**
 * Can be used to execute this process manually.
 *
 * @param entries multimap of keys to the values to reduce for each key
 * @param ctx the task context used to emit output
 * @throws IOException if the underlying reduce fails on I/O
 * @throws InterruptedException if the task is interrupted
 */
public void reduce(Multimap<IK,IV> entries, TaskInputOutputContext<?,?,OK,OV> ctx) throws IOException, InterruptedException {
    // Iterate the map view directly instead of keySet() + get(), avoiding a
    // second lookup per key. Fully-qualified Entry avoids requiring a new import.
    for (java.util.Map.Entry<IK,Collection<IV>> entry : entries.asMap().entrySet()) {
        this.doReduce(entry.getKey(), entry.getValue(), ctx);
    }
}