Java Code Examples for com.google.common.collect.HashBasedTable#create()

The following examples show how to use com.google.common.collect.HashBasedTable#create(). Each example is taken from an open-source project; the source file, project, and license are noted above the code.
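As a quick orientation before the project examples, here is a minimal, self-contained sketch (not drawn from any of the projects below; the class name is illustrative). HashBasedTable.create() returns an empty, mutable Table whose cells are addressed by a (row key, column key) pair.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class HashBasedTableBasics {
    public static void main(String[] args) {
        // create() returns an empty, mutable table; each cell is keyed by a (row, column) pair.
        Table<String, String, Integer> seats = HashBasedTable.create();
        seats.put("Mumbai", "IT", 60);
        seats.put("Harvard", "IT", 120);

        System.out.println(seats.get("Harvard", "IT"));      // 120
        System.out.println(seats.contains("Mumbai", "IT"));  // true
        System.out.println(seats.size());                    // 2 (number of cells)

        seats.remove("Mumbai", "IT");
        System.out.println(seats.containsRow("Mumbai"));     // false once the row has no cells left
    }
}
 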
Example 1
Source File: ChartCell.java    From EasySRL with Apache License 2.0
public ChartCellNbestFactory(final int nbest, final double nbestBeam, final int maxSentenceLength,
		final Collection<Category> categories) {
	super();
	this.nbest = nbest;
	this.nbestBeam = nbestBeam;
	final Random randomGenerator = new Random();

	// Build a hash for every possible dependency
	categoryToArgumentToHeadToModifierToHash = HashBasedTable.create();
	for (final Category c : categories) {
		for (int i = 1; i <= c.getNumberOfArguments(); i++) {
			final int[][] array = new int[maxSentenceLength][maxSentenceLength];
			categoryToArgumentToHeadToModifierToHash.put(c, i, array);
			for (int head = 0; head < maxSentenceLength; head++) {
				for (int child = 0; child < maxSentenceLength; child++) {
					array[head][child] = randomGenerator.nextInt();
				}
			}
		}
	}
}
 
Example 2
Source File: DeviceManager.java    From javaide with GNU General Public License v3.0
/**
 * Returns the known {@link Device} list.
 *
 * @param deviceFilter A combination of the {@link DeviceFilter} constants
 *                     or the constant {@link DeviceManager#ALL_DEVICES}.
 * @return A copy of the list of {@link Device}s. Can be empty but not null.
 */
@NonNull
public Collection<Device> getDevices(@NonNull EnumSet<DeviceFilter> deviceFilter) {
    initDevicesLists();
    Table<String, String, Device> devices = HashBasedTable.create();
    if (mUserDevices != null && (deviceFilter.contains(DeviceFilter.USER))) {
        devices.putAll(mUserDevices);
    }
    if (mDefaultDevices != null && (deviceFilter.contains(DeviceFilter.DEFAULT))) {
        devices.putAll(mDefaultDevices);
    }
    if (mVendorDevices != null && (deviceFilter.contains(DeviceFilter.VENDOR))) {
        devices.putAll(mVendorDevices);
    }
    if (mSysImgDevices != null && (deviceFilter.contains(DeviceFilter.SYSTEM_IMAGES))) {
        devices.putAll(mSysImgDevices);
    }
    return Collections.unmodifiableCollection(devices.values());
}
 
Example 3
Source File: FetchPartitionMessageResponseCodec.java    From joyqueue with Apache License 2.0
@Override
public FetchPartitionMessageResponse decode(JoyQueueHeader header, ByteBuf buffer) throws Exception {
    Table<String, Short, FetchPartitionMessageAckData> data = HashBasedTable.create();
    short topicSize = buffer.readShort();
    for (int i = 0; i < topicSize; i++) {
        String topic = Serializer.readString(buffer, Serializer.SHORT_SIZE);
        int partitionSize = buffer.readShort();
        for (int j = 0; j < partitionSize; j++) {
            short partition = buffer.readShort();
            short messageSize = buffer.readShort();
            List<BrokerMessage> messages = Lists.newArrayListWithCapacity(messageSize);
            for (int k = 0; k < messageSize; k++) {
                messages.add(Serializer.readBrokerMessage(buffer));
            }
            JoyQueueCode code = JoyQueueCode.valueOf(buffer.readInt());
            FetchPartitionMessageAckData fetchPartitionMessageAckData = new FetchPartitionMessageAckData(messages, code);
            data.put(topic, partition, fetchPartitionMessageAckData);
        }
    }

    FetchPartitionMessageResponse fetchPartitionMessageResponse = new FetchPartitionMessageResponse();
    fetchPartitionMessageResponse.setData(data);
    return fetchPartitionMessageResponse;
}
 
Example 4
Source File: DiceRoll.java    From triplea with GNU General Public License v3.0
/**
 * Sorts the specified collection of units in ascending order of their attack or defense strength.
 *
 * @param defending {@code true} if the units should be sorted by their defense strength;
 *     otherwise the units will be sorted by their attack strength.
 */
public static void sortByStrength(final List<Unit> units, final boolean defending) {
  // Pre-compute unit strength information to speed up the sort.
  final Table<UnitType, GamePlayer, Integer> strengthTable = HashBasedTable.create();
  for (final Unit unit : units) {
    final UnitType type = unit.getType();
    final GamePlayer owner = unit.getOwner();
    if (!strengthTable.contains(type, owner)) {
      if (defending) {
        strengthTable.put(type, owner, UnitAttachment.get(type).getDefense(owner));
      } else {
        strengthTable.put(type, owner, UnitAttachment.get(type).getAttack(owner));
      }
    }
  }
  final Comparator<Unit> comp =
      (u1, u2) -> {
        final int v1 = strengthTable.get(u1.getType(), u1.getOwner());
        final int v2 = strengthTable.get(u2.getType(), u2.getOwner());
        return Integer.compare(v1, v2);
      };
  units.sort(comp);
}
 
Example 5
Source File: FetchPartitionMessageRequestCodec.java    From joyqueue with Apache License 2.0
@Override
public FetchPartitionMessageRequest decode(JoyQueueHeader header, ByteBuf buffer) throws Exception {
    Table<String, Short, FetchPartitionMessageData> partitions = HashBasedTable.create();
    int topicSize = buffer.readShort();
    for (int i = 0; i < topicSize; i++) {
        String topic = Serializer.readString(buffer, Serializer.SHORT_SIZE);
        int partitionSize = buffer.readShort();
        for (int j = 0; j < partitionSize; j++) {
            short partition = buffer.readShort();
            int count = buffer.readInt();
            long index = buffer.readLong();

            partitions.put(topic, partition, new FetchPartitionMessageData(count, index));
        }
    }

    FetchPartitionMessageRequest fetchPartitionMessageRequest = new FetchPartitionMessageRequest();
    fetchPartitionMessageRequest.setPartitions(partitions);
    fetchPartitionMessageRequest.setApp(Serializer.readString(buffer, Serializer.SHORT_SIZE));
    return fetchPartitionMessageRequest;
}
 
Example 6
Source File: ClassifierPredictor.java    From bioasq with Apache License 2.0
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
  super.initialize(context);
  String candidateProviderName = UimaContextHelper
          .getConfigParameterStringValue(context, "candidate-provider");
  candidateProvider = ProviderCache.getProvider(candidateProviderName, CandidateProvider.class);
  String scorerNames = UimaContextHelper.getConfigParameterStringValue(context, "scorers");
  scorers = ProviderCache.getProviders(scorerNames, Scorer.class).stream()
          .map(scorer -> (Scorer<? super T>) scorer).collect(toList());
  String classifierName = UimaContextHelper.getConfigParameterStringValue(context, "classifier");
  classifier = ProviderCache.getProvider(classifierName, ClassifierProvider.class);
  if ((featureFilename = UimaContextHelper.getConfigParameterStringValue(context, "feature-file",
          null)) != null) {
    feat2value = HashBasedTable.create();
  }
}
 
Example 7
Source File: CheckpointManager.java    From qmq with Apache License 2.0
private ActionCheckpoint duplicateActionCheckpoint() {
    actionCheckpointGuard.lock();
    try {
        final Table<String, String, ConsumerGroupProgress> progresses = HashBasedTable.create();
        for (final ConsumerGroupProgress progress : actionCheckpoint.getProgresses().values()) {
            final Map<String, ConsumerProgress> consumers = progress.getConsumers();
            if (consumers == null) {
                continue;
            }

            final Map<String, ConsumerProgress> consumersCopy = new HashMap<>();
            for (final ConsumerProgress consumer : consumers.values()) {
                consumersCopy.put(consumer.getConsumerId(), new ConsumerProgress(consumer));
            }
            final String subject = progress.getSubject();
            final String group = progress.getGroup();
            progresses.put(subject, group, new ConsumerGroupProgress(subject, group, progress.isBroadcast(), progress.getPull(), consumersCopy));
        }
        final long offset = actionCheckpoint.getOffset();
        return new ActionCheckpoint(offset, progresses);
    } finally {
        actionCheckpointGuard.unlock();
    }
}
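
Example 7 copies cell-by-cell because it needs deep copies of the ConsumerGroupProgress values. As a hedged aside (not taken from the qmq code; names are illustrative), when a shallow copy is enough, the HashBasedTable.create(Table) overload copies all cell mappings in one call:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

public class CopyTableSketch {
    public static void main(String[] args) {
        Table<String, String, Integer> original = HashBasedTable.create();
        original.put("subjectA", "group1", 42);

        // create(Table) copies every cell mapping; the values themselves are not cloned.
        Table<String, String, Integer> shallowCopy = HashBasedTable.create(original);
        shallowCopy.put("subjectB", "group2", 7);

        System.out.println(original.size());    // 1 (the original is unaffected by changes to the copy)
        System.out.println(shallowCopy.size()); // 2
    }
}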
 
Example 8
Source File: MetadataIOIntegrationTest.java    From blueflood with Apache License 2.0
@Test
public void writeAllAstyanaxReadAllDatastax() throws IOException {

    Locator l0 = Locator.createLocatorFromPathComponents( getRandomTenantId(), "all.put.astyanax.all.read.datastax.l0" );
    Locator l1 = Locator.createLocatorFromPathComponents( getRandomTenantId(), "all.put.astyanax.all.read.datastax.l1" );

    Table<Locator, String, String> meta = HashBasedTable.create();
    meta.put( l0, CACHE_KEY, RollupType.GAUGE.toString() );
    meta.put( l1, CACHE_KEY, RollupType.SET.toString() );

    astyanaxMetadataIO.putAll( meta );

    Set<Locator> query = new HashSet<Locator>( Arrays.asList( l0, l1 ) );
    Table<Locator, String, String> result = dMetadataIO.getAllValues( query );

    assertEquals( 2, result.size() );

    Map<String, String> row = result.row( l0 );
    assertEquals( 1, row.size() );

    Map.Entry<String, String> entry = row.entrySet().iterator().next();
    assertEquals( CACHE_KEY, entry.getKey() );
    assertEquals( RollupType.GAUGE.toString(), entry.getValue() );

    Map<String, String> row2 = result.row( l1 );
    assertEquals( 1, row2.size() );

    Map.Entry<String, String> entry2 = row2.entrySet().iterator().next();
    assertEquals( CACHE_KEY, entry2.getKey() );
    assertEquals( RollupType.SET.toString(), entry2.getValue() );

}
 
Example 9
Source File: ConsumerClient.java    From joyqueue with Apache License 2.0
protected FetchPartitionMessageRequest buildPartitionTopicMessageCommand(Table<String, Short, Long> partitions, String app, int count) {
    Table<String, Short, FetchPartitionMessageData> partitionMap = HashBasedTable.create();
    for (Map.Entry<String, Map<Short, Long>> topicEntry : partitions.rowMap().entrySet()) {
        String topic = topicEntry.getKey();
        for (Map.Entry<Short, Long> partitionEntry : topicEntry.getValue().entrySet()) {
            partitionMap.put(topic, partitionEntry.getKey(), new FetchPartitionMessageData(count, partitionEntry.getValue()));
        }
    }

    FetchPartitionMessageRequest fetchPartitionMessageRequest = new FetchPartitionMessageRequest();
    fetchPartitionMessageRequest.setPartitions(partitionMap);
    fetchPartitionMessageRequest.setApp(app);
    return fetchPartitionMessageRequest;
}
 
Example 10
Source File: Payer.java    From synthea with Apache License 2.0
/**
 * Java Serialization support for the entryUtilization field.
 * @param ois stream to read from
 */
private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
  ois.defaultReadObject();
  ArrayList<UtilizationBean> entryUtilizationElements = 
          (ArrayList<UtilizationBean>)ois.readObject();
  if (entryUtilizationElements != null) {
    this.entryUtilization = HashBasedTable.create();
    for (UtilizationBean u: entryUtilizationElements) {
      this.entryUtilization.put(u.year, u.type, u.count);
    }
  }
}
 
Example 11
Source File: WeightedPAutomaton.java    From SPDS with Eclipse Public License 2.0
public String toLabelGroupedDotString() {
    HashBasedTable<D, N, Collection<D>> groupedByTargetAndLabel = HashBasedTable.create();
    for (Transition<N, D> t : transitions) {
        Collection<D> collection = groupedByTargetAndLabel.get(t.getTarget(), t.getLabel());
        if (collection == null)
            collection = Sets.newHashSet();
        collection.add(t.getStart());
        groupedByTargetAndLabel.put(t.getTarget(), t.getLabel(), collection);
    }
    String s = "digraph {\n";
    for (D target : groupedByTargetAndLabel.rowKeySet()) {
        for (N label : groupedByTargetAndLabel.columnKeySet()) {
            Collection<D> source = groupedByTargetAndLabel.get(target, label);
            if (source == null)
                continue;
            s += "\t\"" + Joiner.on("\\n").join(source) + "\"";
            s += " -> \"" + wrapIfInitialOrFinalState(target) + "\"";
            s += "[label=\"" + label + "\"];\n";
        }
    }
    s += "}\n";
    s += "Transitions: " + transitions.size() + "\n";
    for (WeightedPAutomaton<N, D, W> nested : nestedAutomatons) {
        s += "NESTED -> \n";
        s += nested.toDotString();
    }
    return s;
}
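
The null-check-then-put in Example 11 is the classic way to group values into a Table cell. As a hedged aside (not from the SPDS code; the types and names here are illustrative), the same grouping can be written against the live view returned by Table.row(), since Map.computeIfAbsent works through that view:

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

import java.util.Collection;
import java.util.HashSet;

public class GroupIntoTableCells {
    public static void main(String[] args) {
        // Group (target, label) -> set of start states, analogous to the automaton example above.
        Table<String, String, Collection<String>> groupedByTargetAndLabel = HashBasedTable.create();

        String[][] transitions = {{"q1", "a", "q0"}, {"q1", "a", "q2"}, {"q3", "b", "q0"}};
        for (String[] t : transitions) {
            String target = t[0], label = t[1], start = t[2];
            // row(target) is a live Map view backed by the table, so computeIfAbsent
            // creates the cell's collection on first use and reuses it afterwards.
            groupedByTargetAndLabel.row(target).computeIfAbsent(label, k -> new HashSet<>()).add(start);
        }

        System.out.println(groupedByTargetAndLabel); // e.g. {q1={a=[q0, q2]}, q3={b=[q0]}}; iteration order is not guaranteed
    }
}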
 
Example 12
Source File: IncrementalDataPlane.java    From batfish with Apache License 2.0
@Nonnull
private Table<String, String, Set<Layer2Vni>> computeVniSettings() {
  Table<String, String, Set<Layer2Vni>> result = HashBasedTable.create();
  for (Node node : _nodes.values()) {
    for (Entry<String, VirtualRouter> vr : node.getVirtualRouters().entrySet()) {
      result.put(
          node.getConfiguration().getHostname(), vr.getKey(), vr.getValue().getLayer2Vnis());
    }
  }
  return result;
}
 
Example 13
Source File: CheckpointManager.java    From qmq with Apache License 2.0
private ActionCheckpoint loadActionCheckpoint() {
    final Snapshot<ActionCheckpoint> snapshot = actionCheckpointStore.latestSnapshot();
    if (snapshot == null) {
        LOG.info("no action log replay snapshot, return empty state.");
        return new ActionCheckpoint(-1, HashBasedTable.create());
    } else {
        return snapshot.getData();
    }
}
 
Example 14
Source File: InMemoryMetadataIO.java    From blueflood with Apache License 2.0
@Override
public Table<Locator, String, String> getAllValues(Set<Locator> locators) throws IOException {
    Table<Locator, String, String> results = HashBasedTable.create();

    for (Locator locator : locators) {
        Map<String, String> metaForLoc = backingTable.row(locator);
        for (Map.Entry<String, String> meta : metaForLoc.entrySet()) {
            results.put(locator, meta.getKey(), meta.getValue());
        }
    }

    return results;
}
 
Example 15
Source File: GuavaTableUnitTest.java    From tutorials with MIT License
@Test
public void givenTable_whenColumn_returnsSuccessfully() {
    final Table<String, String, Integer> universityCourseSeatTable = HashBasedTable.create();
    universityCourseSeatTable.put("Mumbai", "Chemical", 120);
    universityCourseSeatTable.put("Mumbai", "IT", 60);
    universityCourseSeatTable.put("Harvard", "Electrical", 60);
    universityCourseSeatTable.put("Harvard", "IT", 120);

    final Map<String, Integer> universitySeatMap = universityCourseSeatTable.column("IT");

    assertThat(universitySeatMap).hasSize(2);
    assertThat(universitySeatMap.get("Mumbai")).isEqualTo(60);
    assertThat(universitySeatMap.get("Harvard")).isEqualTo(120);
}
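
As a hedged follow-up to Example 15 (not part of the original tutorial test; the class name is illustrative), the row-oriented views work the same way: row() and rowMap() return live views backed by the table, and cellSet() iterates every (row, column, value) triple.

import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;

import java.util.Map;

public class GuavaTableRowViews {
    public static void main(String[] args) {
        Table<String, String, Integer> universityCourseSeatTable = HashBasedTable.create();
        universityCourseSeatTable.put("Mumbai", "Chemical", 120);
        universityCourseSeatTable.put("Mumbai", "IT", 60);
        universityCourseSeatTable.put("Harvard", "Electrical", 60);
        universityCourseSeatTable.put("Harvard", "IT", 120);

        // row() returns the courses for one university as a live Map view.
        Map<String, Integer> mumbaiSeatMap = universityCourseSeatTable.row("Mumbai");
        System.out.println(mumbaiSeatMap);          // {Chemical=120, IT=60} (iteration order not guaranteed)

        // rowMap() exposes the whole table as Map<row, Map<column, value>>.
        Map<String, Map<String, Integer>> byUniversity = universityCourseSeatTable.rowMap();
        System.out.println(byUniversity.keySet());  // [Mumbai, Harvard] (iteration order not guaranteed)

        // cellSet() iterates every (row, column, value) triple.
        universityCourseSeatTable.cellSet().forEach(cell ->
                System.out.println(cell.getRowKey() + " / " + cell.getColumnKey() + " -> " + cell.getValue()));
    }
}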
 
Example 16
Source File: Backend.java    From quantumdb with Apache License 2.0
private Table<RefId, Version, String> listTableVersions(Connection connection, Map<String, RefId> refIds, Changelog changelog) throws SQLException {
	Table<RefId, Version, String> mapping = HashBasedTable.create();
	try (Statement statement = connection.createStatement()) {
		ResultSet resultSet = statement.executeQuery("SELECT * FROM quantumdb.ref_versions ORDER BY ref_id ASC;");
		while (resultSet.next()) {
			String refId = resultSet.getString("ref_id");
			String tableName = resultSet.getString("table_name");
			String versionId = resultSet.getString("version_id");
			Version version = changelog.getVersion(versionId);
			RefId refIdRef = refIds.get(refId);
			mapping.put(refIdRef, version, tableName);
		}
	}
	return mapping;
}
 
Example 17
Source File: AbstractEnrichProcessor.java    From nifi with Apache License 2.0
/**
 * This method returns the parsed record string in the form of
 * a map of two strings, consisting of iteration-aware attribute
 * names and their values.
 *
 * @param rawResult the raw query results to be parsed
 * @param queryParser the parsing mechanism being used to parse the data into groups
 * @param queryRegex the regex used to split the query results into groups; the regex MUST implement at least one named capture group "KEY" to be used to populate the table rows
 * @param lookupKey the regular expression capture group number, or the column of a split, to be used for matching
 * @param schema the schema label used to prefix the enrichment attribute names
 * @return a Table of attribute names and values, where each Table row uses the value of the KEY named capture group specified in {@code queryRegex}
 */
protected Table<String, String, String> parseBatchResponse(String rawResult, String queryParser, String queryRegex, int lookupKey, String schema) {
    // Note the hardcoded record0.
    // Since iteration is done within the parser and a Multimap is used, the record number here will always be 0.
    // Consequently, 0 is hardcoded so that batched and non-batched attributes follow the same naming conventions.
    final String recordPosition = ".record0";

    final Table<String, String, String> results = HashBasedTable.create();

    switch (queryParser) {
        case "Split":
            Scanner scanner = new Scanner(rawResult);
            while (scanner.hasNextLine()) {
                String line = scanner.nextLine();
                // Time to Split the results...
                String[] splitResult = line.split(queryRegex);

                for (int r = 0; r < splitResult.length; r++) {
                    results.put(splitResult[ lookupKey - 1 ], "enrich." + schema + recordPosition + ".group" + String.valueOf(r), splitResult[r]);
                }
            }
            break;
        case "RegEx":
        // prepare the regex
        Pattern p;
        // Regex is multiline. Each line should include a KEY for lookup
        p = Pattern.compile(queryRegex, Pattern.MULTILINE);

        Matcher matcher = p.matcher(rawResult);
        while (matcher.find()) {
            try {
                // Note that RegEx matches capture group 0 is usually broad but starting with it anyway
                // for the sake of purity
                for (int r = 0; r <= matcher.groupCount(); r++) {
                    results.put(matcher.group(lookupKey), "enrich." + schema + recordPosition + ".group" + String.valueOf(r), matcher.group(r));
                }
            } catch (IndexOutOfBoundsException e) {
                getLogger().warn("Could not find capture group {} while processing result. You may want to review your " +
                        "Regular Expression to match against the content \"{}\"", new Object[]{lookupKey, rawResult});
            }
        }
        break;
    }

    return results;
}
 
Example 18
Source File: CachingRowGroupIteratorTest.java    From emodb with Apache License 2.0
@BeforeMethod
public void setUp() {
    _rowGroups = new ArrayDeque<>();
    _softReferences = HashBasedTable.create();
}
 
Example 19
Source File: RipProcess.java    From batfish with Apache License 2.0
public RipProcess() {
  _generatedRoutes = new TreeSet<>();
  _interfaces = new TreeSet<>();
  _ripNeighbors = HashBasedTable.create();
}