Java Code Examples for com.google.common.collect.Table#putAll()
The following examples show how to use
com.google.common.collect.Table#putAll() .
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: DeviceManager.java From javaide with GNU General Public License v3.0 | 6 votes |
/** * Returns the known {@link Device} list. * * @param deviceFilter A combination of the {@link DeviceFilter} constants * or the constant {@link DeviceManager#ALL_DEVICES}. * @return A copy of the list of {@link Device}s. Can be empty but not null. */ @NonNull public Collection<Device> getDevices(@NonNull EnumSet<DeviceFilter> deviceFilter) { initDevicesLists(); Table<String, String, Device> devices = HashBasedTable.create(); if (mUserDevices != null && (deviceFilter.contains(DeviceFilter.USER))) { devices.putAll(mUserDevices); } if (mDefaultDevices != null && (deviceFilter.contains(DeviceFilter.DEFAULT))) { devices.putAll(mDefaultDevices); } if (mVendorDevices != null && (deviceFilter.contains(DeviceFilter.VENDOR))) { devices.putAll(mVendorDevices); } if (mSysImgDevices != null && (deviceFilter.contains(DeviceFilter.SYSTEM_IMAGES))) { devices.putAll(mSysImgDevices); } return Collections.unmodifiableCollection(devices.values()); }
Example 2
Source File: ParallelCorpusReader.java From EasySRL with Apache License 2.0 | 6 votes |
private static Collection<SRLParse> getPropbankSection(final String section) throws IOException { final Table<String, Integer, TreebankParse> PTB = new PennTreebank().readCorpus(WSJ); final Table<String, Integer, SRLParse> srlParses = SRLParse.parseCorpus(PTB, Util.readFileLineByLine(new File(PROPBANK, "prop.txt")), USING_NOMBANK ? Util.readFileLineByLine(NOMBANK) : null); final Table<String, Integer, SRLParse> goldParses = TreeBasedTable.create(); for (final Cell<String, Integer, TreebankParse> cell : PTB.cellSet()) { // Propbank files skip sentences with no SRL deps. Add a default // empty parse for all sentences. goldParses.put(cell.getRowKey(), cell.getColumnKey(), new SRLParse(cell.getValue().getWords())); } goldParses.putAll(srlParses); final Collection<SRLParse> result = new ArrayList<>(); for (final Cell<String, Integer, SRLParse> entry : goldParses.cellSet()) { if (entry.getRowKey().startsWith("wsj_" + section)) { result.add(entry.getValue()); } } return result; }
Example 3
Source File: RuntimeEntityResolver.java From bazel with Apache License 2.0 | 6 votes |
private static Class<?> loadClassLiteral( DynamicClassLiteral dynamicClassLiteralRequest, List<JarTransformationRecord> jarTransformationRecords, ClassLoader initialInputClassLoader, Table<Integer, ClassMemberKey<?>, Member> reflectionBasedMembers, Table<Integer, ClassMemberKey<?>, Set<ClassMemberKey<?>>> missingDescriptorLookupRepo, String workingJavaPackage) throws Throwable { int round = dynamicClassLiteralRequest.round(); ClassLoader outputJarClassLoader = round == 0 ? initialInputClassLoader : jarTransformationRecords.get(round - 1).getOutputClassLoader(); String requestedClassName = dynamicClassLiteralRequest.value(); String qualifiedClassName = workingJavaPackage.isEmpty() || requestedClassName.contains(".") ? requestedClassName : workingJavaPackage + "." + requestedClassName; Class<?> classLiteral = outputJarClassLoader.loadClass(qualifiedClassName); reflectionBasedMembers.putAll(getReflectionBasedClassMembers(round, classLiteral)); fillMissingClassMemberDescriptorRepo(round, classLiteral, missingDescriptorLookupRepo); return classLiteral; }
Example 4
Source File: DAbstractMetricIO.java From blueflood with Apache License 2.0 | 6 votes |
/** * Fetch values for a list of {@link com.rackspacecloud.blueflood.types.Locator} * from the specified column family and range. * * This is a base behavior for most rollup types. IO subclasses can override * this behavior as they see fit. * * @param locators * @param columnFamily * @param range * @return */ protected <T extends Object> Table<Locator, Long, T> getValuesForLocators( final List<Locator> locators, String columnFamily, Range range ) { Table<Locator, Long, T> locatorTimestampRollup = HashBasedTable.create(); Map<Locator, List<ResultSetFuture>> resultSetFuturesMap = selectForLocatorListAndRange(columnFamily, locators, range); for (Map.Entry<Locator, List<ResultSetFuture>> entry : resultSetFuturesMap.entrySet() ) { Locator locator = entry.getKey(); List<ResultSetFuture> futures = entry.getValue(); Table<Locator, Long, T> result = toLocatorTimestampValue(futures, locator, columnFamily, range); locatorTimestampRollup.putAll(result); } return locatorTimestampRollup; }
Example 5
Source File: SymbolWriter.java From java-n-IDE-for-Android with Apache License 2.0 | 5 votes |
private Table<String, String, SymbolEntry> getAllSymbols() { Table<String, String, SymbolEntry> symbols = HashBasedTable.create(); for (SymbolLoader symbolLoader : mSymbols) { symbols.putAll(symbolLoader.getSymbols()); } return symbols; }
Example 6
Source File: SymbolWriter.java From javaide with GNU General Public License v3.0 | 5 votes |
private Table<String, String, SymbolEntry> getAllSymbols() { Table<String, String, SymbolEntry> symbols = HashBasedTable.create(); for (SymbolLoader symbolLoader : mSymbols) { symbols.putAll(symbolLoader.getSymbols()); } return symbols; }
Example 7
Source File: SequenceMiningCore.java From sequence-mining with GNU General Public License v3.0 | 5 votes |
/** Evaluate a candidate sequence to see if it should be included */ private static boolean evaluateCandidate(final Table<Sequence, Integer, Double> sequences, final TransactionDatabase transactions, final InferenceAlgorithm inferenceAlgorithm, final Sequence candidate) { logger.finer("\n Candidate: " + candidate); // Find cost in parallel Tuple2<Double, Map<Integer, Double>> costAndProb; // if (transactions instanceof TransactionRDD) { // costAndProb = SparkEMStep.structuralEMStep(transactions, // inferenceAlgorithm, candidate); // } else { costAndProb = EMStep.structuralEMStep(transactions, inferenceAlgorithm, candidate); // } final double curCost = costAndProb._1; final Map<Integer, Double> prob = costAndProb._2; logger.finer(String.format(", cost: %.2f", curCost)); // Return if better collection of seqs found if (curCost < transactions.getAverageCost()) { logger.finer("\n Candidate Accepted.\n"); // Update cache with candidate Table<Sequence, Integer, Double> newSequences; // if (transactions instanceof TransactionRDD) { // newItemsets = SparkEMStep.addAcceptedCandidateCache( // transactions, candidate, prob); // } else { newSequences = EMStep.addAcceptedCandidateCache(transactions, candidate, prob); // } // Update sequences with newly inferred sequences sequences.clear(); sequences.putAll(newSequences); transactions.setAverageCost(curCost); return true; } // otherwise keep trying // No better candidate found return false; }