com.google.api.client.util.Maps Java Examples
The following examples show how to use
com.google.api.client.util.Maps.
Each example is taken from an open source project; the project, source file, and license are noted above each snippet.
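Before the examples, here is a minimal, self-contained sketch of the two factory methods that recur below, Maps.newHashMap() and Maps.newLinkedHashMap(). The demo class and variable names are illustrative; the two Maps calls are exactly the ones used in the snippets that follow.

import com.google.api.client.util.Maps;

import java.util.Map;

public class MapsDemo {
  public static void main(String[] args) {
    // newHashMap() infers the key/value types from the target variable,
    // sparing the repeated type parameters of new HashMap<String, Integer>().
    Map<String, Integer> counts = Maps.newHashMap();
    counts.put("requests", 42);

    // newLinkedHashMap() returns a map that iterates in insertion order.
    Map<String, String> ordered = Maps.newLinkedHashMap();
    ordered.put("first", "a");
    ordered.put("second", "b");

    System.out.println(counts);   // {requests=42}
    System.out.println(ordered);  // {first=a, second=b}
  }
}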
Example #1
Source File: AppEngineDataStoreFactory.java, from google-http-java-client (Apache License 2.0)
@Override
public Collection<V> values() throws IOException {
  lock.lock();
  try {
    // Unfortunately no getKeys() method on MemcacheService, so the only option is to clear all
    // and re-populate the memcache from scratch. This is clearly inefficient.
    if (memcache != null) {
      memcache.clearAll();
    }
    List<V> result = Lists.newArrayList();
    Map<String, V> map = memcache != null ? Maps.<String, V>newHashMap() : null;
    for (Entity entity : query(false)) {
      V value = deserialize(entity);
      result.add(value);
      if (map != null) {
        map.put(entity.getKey().getName(), value);
      }
    }
    if (memcache != null) {
      memcache.putAll(map, memcacheExpiration);
    }
    return Collections.unmodifiableList(result);
  } finally {
    lock.unlock();
  }
}
Example #2
Source File: FileDataStoreFactory.java, from google-http-java-client (Apache License 2.0)
FileDataStore(FileDataStoreFactory dataStore, File dataDirectory, String id) throws IOException {
  super(dataStore, id);
  this.dataFile = new File(dataDirectory, id);
  // error if it is a symbolic link
  if (IOUtils.isSymbolicLink(dataFile)) {
    throw new IOException("unable to use a symbolic link: " + dataFile);
  }
  // create new file (if necessary)
  if (dataFile.createNewFile()) {
    keyValueMap = Maps.newHashMap();
    // save the credentials to create a new file
    save();
  } else {
    // load credentials from existing file
    keyValueMap = IOUtils.deserialize(new FileInputStream(dataFile));
  }
}
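The constructor above has no access modifier, so callers normally reach it through FileDataStoreFactory. A minimal sketch of that path, assuming the standard google-http-java-client store API; the directory and store id here are illustrative:

import com.google.api.client.util.store.DataStore;
import com.google.api.client.util.store.FileDataStoreFactory;

import java.io.File;
import java.io.IOException;

public class FileDataStoreDemo {
  public static void main(String[] args) throws IOException {
    // The factory owns the data directory; getDataStore(id) hands back a
    // store whose per-id file is created or loaded by the constructor above.
    FileDataStoreFactory factory =
        new FileDataStoreFactory(new File(System.getProperty("user.home"), ".store/demo"));
    DataStore<String> store = factory.getDataStore("settings");
    store.set("theme", "dark");
    System.out.println(store.get("theme")); // dark
  }
}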
Example #3
Source File: CustomDataStoreFactory.java, from hop (Apache License 2.0)
CustomDataStore( CustomDataStoreFactory dataStore, File dataDirectory, String id ) throws IOException {
  super( dataStore, id );
  this.dataDirectory = dataDirectory;
  this.dataFile = new File( this.dataDirectory, getId() );
  // refuse to operate through a symbolic link
  if ( IOUtils.isSymbolicLink( this.dataFile ) ) {
    throw new IOException( "unable to use a symbolic link: " + this.dataFile );
  }
  // start empty, then replace with any previously persisted entries
  this.keyValueMap = Maps.newHashMap();
  if ( this.dataFile.exists() ) {
    this.keyValueMap = IOUtils.deserialize( new FileInputStream( this.dataFile ) );
  }
}
Example #4
Source File: VerifyContext.java, from PeonyFramwork (Apache License 2.0)
public VerifyContext(Map<Class<?>, ConfigContainer<?, IConfig<?>>> configContainers) {
  this.configContainers = configContainers;
  // build a secondary index of the same containers, keyed by config file name
  this.configContainersByFileName = Maps.newHashMap();
  for (ConfigContainer<?, IConfig<?>> t : configContainers.values()) {
    this.configContainersByFileName.put(t.getMetaData().getFileName(), t);
  }
}
Example #5
Source File: SchemaRepository.java, from endpoints-java (Apache License 2.0)
/**
 * Gets all schemas for an API config.
 *
 * @return a {@link Map} from {@link TypeToken} to {@link Schema}. If there are no schemas for
 *     this config, an empty map is returned.
 */
private Map<TypeToken<?>, Schema> getAllTypesForConfig(ApiConfig config) {
  Map<TypeToken<?>, Schema> typesForConfig = types.get(config.getSerializationConfig());
  if (typesForConfig == null) {
    typesForConfig = Maps.newLinkedHashMap();
    types.put(config.getSerializationConfig(), typesForConfig);
  }
  return typesForConfig;
}
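The linked variant matters here: unlike newHashMap(), Maps.newLinkedHashMap() preserves the order in which entries were first inserted, presumably so schema iteration follows registration order deterministically. A standalone sketch of the difference (not from the endpoints-java source):

import com.google.api.client.util.Maps;

import java.util.Map;

public class OrderingDemo {
  public static void main(String[] args) {
    Map<String, Integer> linked = Maps.newLinkedHashMap();
    linked.put("b", 2);
    linked.put("a", 1);
    linked.put("c", 3);
    // Iterates in insertion order (b, a, c), whereas a plain HashMap
    // makes no ordering guarantee at all.
    System.out.println(linked.keySet()); // [b, a, c]
  }
}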
Example #6
Source File: FlinkStreamingTransformTranslators.java, from flink-dataflow (Apache License 2.0)
private Map<TupleTag<?>, Integer> transformTupleTagsToLabels(TupleTag<?> mainTag, Set<TupleTag<?>> secondaryTags) {
  Map<TupleTag<?>, Integer> tagToLabelMap = Maps.newHashMap();
  // the main output always takes the reserved index; secondary tags follow
  tagToLabelMap.put(mainTag, MAIN_TAG_INDEX);
  int count = MAIN_TAG_INDEX + 1;
  for (TupleTag<?> tag : secondaryTags) {
    if (!tagToLabelMap.containsKey(tag)) {
      tagToLabelMap.put(tag, count++);
    }
  }
  return tagToLabelMap;
}
Example #7
Source File: PlatformConverterUtil.java, from cloudbreak (Apache License 2.0)
public static <T extends StringType> Map<String, String> convertDefaults(Map<Platform, T> vms) {
  Map<String, String> result = Maps.newHashMap();
  for (Entry<Platform, T> entry : vms.entrySet()) {
    result.put(entry.getKey().value(), entry.getValue().value());
  }
  return result;
}
Example #8
Source File: PlatformConverterUtil.java, from cloudbreak (Apache License 2.0)
public static <P extends StringType, T extends StringType, C extends Collection<T>> Map<String, Collection<String>> convertPlatformMap(Map<P, C> vms) {
  Map<String, Collection<String>> result = Maps.newHashMap();
  for (Entry<P, C> entry : vms.entrySet()) {
    result.put(entry.getKey().value(), convertList(entry.getValue()));
  }
  return result;
}
Example #9
Source File: PlatformConverterUtil.java, from cloudbreak (Apache License 2.0)
public static <P extends StringType, T extends StringType, C extends StringType> Map<String, Map<String, String>> convertDisplayNameMap(Map<P, Map<T, C>> dNs) {
  Map<String, Map<String, String>> result = Maps.newHashMap();
  for (Entry<P, Map<T, C>> entry : dNs.entrySet()) {
    Map<String, String> innerMap = new HashMap<>();
    for (Entry<T, C> tzEntry : entry.getValue().entrySet()) {
      innerMap.put(tzEntry.getKey().value(), tzEntry.getValue().value());
    }
    result.put(entry.getKey().value(), innerMap);
  }
  return result;
}
Example #10
Source File: CustomDataStoreFactory.java, from pentaho-kettle (Apache License 2.0)
CustomDataStore( CustomDataStoreFactory dataStore, File dataDirectory, String id ) throws IOException {
  super( dataStore, id );
  this.dataDirectory = dataDirectory;
  this.dataFile = new File( this.dataDirectory, getId() );
  if ( IOUtils.isSymbolicLink( this.dataFile ) ) {
    throw new IOException( "unable to use a symbolic link: " + this.dataFile );
  }
  this.keyValueMap = Maps.newHashMap();
  if ( this.dataFile.exists() ) {
    this.keyValueMap = (HashMap) IOUtils.deserialize( new FileInputStream( this.dataFile ) );
  }
}
Example #11
Source File: FlinkBatchTransformTranslators.java, from flink-dataflow (Apache License 2.0)
@Override
public void translateNode(ParDo.BoundMulti<IN, OUT> transform, FlinkBatchTranslationContext context) {
  DataSet<IN> inputDataSet = context.getInputDataSet(context.getInput(transform));

  final DoFn<IN, OUT> doFn = transform.getFn();

  Map<TupleTag<?>, PCollection<?>> outputs = context.getOutput(transform).getAll();

  Map<TupleTag<?>, Integer> outputMap = Maps.newHashMap();
  // put the main output at index 0, FlinkMultiOutputDoFnFunction also expects this
  outputMap.put(transform.getMainOutputTag(), 0);
  int count = 1;
  for (TupleTag<?> tag : outputs.keySet()) {
    if (!outputMap.containsKey(tag)) {
      outputMap.put(tag, count++);
    }
  }

  // collect all output Coders and create a UnionCoder for our tagged outputs
  List<Coder<?>> outputCoders = Lists.newArrayList();
  for (PCollection<?> coll : outputs.values()) {
    outputCoders.add(coll.getCoder());
  }

  UnionCoder unionCoder = UnionCoder.of(outputCoders);

  @SuppressWarnings("unchecked")
  TypeInformation<RawUnionValue> typeInformation = new CoderTypeInformation<>(unionCoder);

  @SuppressWarnings("unchecked")
  FlinkMultiOutputDoFnFunction<IN, OUT> doFnWrapper =
      new FlinkMultiOutputDoFnFunction(doFn, context.getPipelineOptions(), outputMap);

  MapPartitionOperator<IN, RawUnionValue> outputDataSet =
      new MapPartitionOperator<>(inputDataSet, typeInformation, doFnWrapper, transform.getName());

  transformSideInputs(transform.getSideInputs(), outputDataSet, context);

  for (Map.Entry<TupleTag<?>, PCollection<?>> output : outputs.entrySet()) {
    TypeInformation<Object> outputType = context.getTypeInfo(output.getValue());
    int outputTag = outputMap.get(output.getKey());
    FlinkMultiOutputPruningFunction<Object> pruningFunction =
        new FlinkMultiOutputPruningFunction<>(outputTag);
    FlatMapOperator<RawUnionValue, Object> pruningOperator =
        new FlatMapOperator<>(outputDataSet, outputType, pruningFunction, output.getValue().getName());
    context.setOutputDataSet(output.getValue(), pruningOperator);
  }
}