Java Code Examples for java.util.Map

The following are top-voted examples showing how to use java.util.Map. These examples are extracted from open source projects. You can vote up the examples you like, and your votes will be used in our system to produce more good examples.

Example 1
Project: nikeplus-fuelband-se-reversed   File: MainActivity.java View source code 7 votes vote down vote up
/**
 * Logs every GATT service exposed by the connected device, together with each
 * characteristic (its name, UUID, property flags and current raw value) and
 * each descriptor UUID.
 *
 * @param gatt the connected GATT client whose discovered services are dumped
 */
private void dumpServices( BluetoothGatt gatt )
{
	for( BluetoothGattService svc : gatt.getServices() )
	{
		String svc_uuid = svc.getUuid().toString(),
			   svc_name = GATTAttributes.lookup( svc_uuid, "" );
		Logger.d( "SERVICE " + svc_name + " ( " + svc_uuid + " )" );

		for( BluetoothGattCharacteristic chara : svc.getCharacteristics() )
		{
			String chr_uuid = chara.getUuid().toString(),
				   chr_name = GATTAttributes.lookup( chr_uuid, "" );
			int chr_props = chara.getProperties();
			// Build a readable list of the property flags set on this
			// characteristic. StringBuilder avoids repeated String
			// concatenation inside the loop, and the enhanced for replaces
			// the raw Iterator/Map.Entry idiom of the original.
			StringBuilder props = new StringBuilder();

			for( Object entryObj : propsMap.entrySet() ){
				Map.Entry<?, ?> pair = (Map.Entry<?, ?>) entryObj;

				if( ( chr_props & (Integer) pair.getKey() ) != 0 ){
					props.append( pair.getValue() ).append( ' ' );
				}
			}

			Logger.d( "    " + chr_name + " ( " + chr_uuid + " ) [" + props + "] : " + Utils.bytesToHex(chara.getValue() ) );

			for( BluetoothGattDescriptor desc : chara.getDescriptors() )
			{
				Logger.d( "      DESC: " + desc.getUuid() );
			}
		}
	}

	Logger.d( "---------------------------------------------------------------------------" );
}
 
Example 2
Project: elasticsearch-osem   File: MappingProcessor.java View source code 6 votes vote down vote up
/**
 * Build the mapping definition for the given indexable class.
 *
 * @param clazz class to get the mapping for
 * @return single-entry map keyed by the class's index type name; its value is
 *         the class-level mapping plus a "properties" sub-map
 */
public static Map<String, Object> getMapping(Class clazz) {
    String typeName = getIndexTypeName(clazz);

    Map<String, Object> classMapping = getIndexableMap(clazz);
    classMapping.put("properties", getPropertiesMap(clazz));

    Map<String, Object> mapping = Maps.newHashMap();
    mapping.put(typeName, classMapping);
    return mapping;
}
 
Example 3
Project: lettuce   File: RedisClusterClientTest.java View source code 6 votes vote down vote up
// Integration test: verifies that command latencies are tracked per cluster
// node, including a command that was redirected between two nodes.
@Test
public void testClusterLatencyMetrics() throws Exception {

    CommandLatencyCollector commandLatencyCollector = clusterClient.getResources().commandLatencyCollector();
    // Drain any previously collected metrics so the assertions below only
    // see the commands issued by testClusterRedirection().
    commandLatencyCollector.retrieveMetrics();
    testClusterRedirection();

    Map<CommandLatencyId, CommandMetrics> metrics = commandLatencyCollector.retrieveMetrics();
    CommandLatencyId node1 = findId(metrics, port1);
    CommandLatencyId node3 = findId(metrics, port3);

    CommandMetrics node1Metrics = metrics.get(node1);
    assertThat(node1Metrics.getCount()).isEqualTo(2); // the direct and the redirected one

    CommandMetrics node3Metrics = metrics.get(node3); // the redirected one
    assertThat(node3Metrics.getCount()).isEqualTo(1);
}
 
Example 4
Project: lettuce   File: SentinelRule.java View source code 6 votes vote down vote up
/**
 * Find the first connection that reports the MASTER role and return the port
 * it is registered under.
 *
 * @param connections map of redis port to an open connection for that port
 * @return the port of the first master found, or {@code null} if none of the
 *         connections belongs to a master
 */
public Integer getMasterPort(Map<Integer, RedisConnection<String, String>> connections) {

    for (Map.Entry<Integer, RedisConnection<String, String>> candidate : connections.entrySet()) {

        List<Object> roleReply = candidate.getValue().role();

        RedisInstance instance = RoleParser.parse(roleReply);
        if (RedisInstance.Role.MASTER == instance.getRole()) {
            return candidate.getKey();
        }
    }
    return null;
}
 
Example 5
Project: jentrata-msh   File: CamelMessage.java View source code 6 votes vote down vote up
/**
 * Merge message headers with fall-back values from the default configuration:
 * every valid header key missing from {@code headers} is filled in from
 * {@code defaultConfig}.
 *
 * @param headers       the headers supplied with the message
 * @param defaultConfig default values, consulted only for absent keys
 * @return a new map containing all input headers plus the defaults
 */
private Map<String, Object> mergeHeader(Map<String, Object> headers, Properties defaultConfig) {
    Map<String, Object> merged = new HashMap<String, Object>(headers);
    for (String key : VALID_HEADER_KEYS) {
        if (!headers.containsKey(key)) {
            // getProperty (not the inherited Hashtable.get) so values from the
            // Properties' defaults table are honoured as well.
            merged.put(key, defaultConfig.getProperty(key));
        }
    }
    return merged;
}
 
Example 6
Project: lettuce   File: PooledClusterConnectionProvider.java View source code 6 votes vote down vote up
/**
 * Retrieve the keys of all pooled connections that no longer correspond to a
 * node within the current {@link Partitions} topology.
 *
 * @return set of stale {@link ConnectionKey}s
 */
private Set<ConnectionKey> getStaleConnectionKeys() {
    Map<ConnectionKey, RedisAsyncConnectionImpl<K, V>> snapshot = Maps.newHashMap(connections.asMap());
    Set<ConnectionKey> staleKeys = Sets.newHashSet();

    for (ConnectionKey key : snapshot.keySet()) {

        // A connection is still valid when its node id or its host/port can
        // be resolved against the current partition table.
        boolean stillKnown = (key.nodeId != null && partitions.getPartitionByNodeId(key.nodeId) != null)
                || (key.host != null && getPartition(key.host, key.port) != null);

        if (!stillKnown) {
            staleKeys.add(key);
        }
    }
    return staleKeys;
}
 
Example 7
Project: lettuce   File: CommandArgs.java View source code 6 votes vote down vote up
/**
 * Append all entries of the given map as alternating encoded key/value
 * arguments.
 *
 * @param map entries to append
 * @return {@code this} for chaining
 */
public CommandArgs<K, V> add(Map<K, V> map) {
    // Grow the buffer up front for larger maps instead of relying on
    // repeated incremental reallocation while writing.
    if (map.size() > 2) {
        realloc(buffer.capacity() + 16 * map.size());
    }

    for (Map.Entry<K, V> mapEntry : map.entrySet()) {
        write(codec.encodeKey(mapEntry.getKey()));
        write(codec.encodeValue(mapEntry.getValue()));
    }

    return this;
}
 
Example 8
Project: lettuce   File: SentinelRule.java View source code 6 votes vote down vote up
/**
 * Check if a master runs on any of the given ports.
 *
 * @param redisPorts ports to probe
 * @return {@code true} if at least one of the given ports hosts a master
 */
public boolean hasMaster(int... redisPorts) {

    Map<Integer, RedisConnection<String, String>> connections = Maps.newHashMap();

    try {
        // Connect inside the try block so connections opened before a failing
        // connect() are still closed by the finally clause (the original
        // leaked them when a later connect threw).
        for (int redisPort : redisPorts) {
            connections.put(redisPort, redisClient.connect(RedisURI.Builder.redis(TestSettings.hostAddr(), redisPort).build()));
        }

        Integer masterPort = getMasterPort(connections);
        if (masterPort != null) {
            return true;
        }
    } finally {
        for (RedisConnection<String, String> commands : connections.values()) {
            commands.close();
        }
    }

    return false;
}
 
Example 9
Project: aws-toolkit-eclipse   File: DynamoDBTableEditor.java View source code 6 votes vote down vote up
/**
 * Marks the given tree item and column modified: updates the in-memory
 * DynamoDB item, records the edit for later saving, and highlights the cell
 * (and its inline editor, if present) in red.
 *
 * TODO: type checking for numbers
 *
 * @param item          the table row backing the edited DynamoDB item
 * @param editorControl the inline text editor for the cell, or null
 * @param row           row index of the edited cell (not used in this method)
 * @param column        column index; its header text is the attribute name
 * @param newValue      the new value(s) entered for the attribute
 * @param dataType      the data type code of the attribute value
 */
protected void markModified(final TableItem item, final Text editorControl, final int row, final int column, final Collection<String> newValue, int dataType) {
    final String attributeName = item.getParent().getColumn(column).getText();
    @SuppressWarnings("unchecked")
    Map<String, AttributeValue> dynamoDbItem = (Map<String, AttributeValue>) item.getData();
    // Create the attribute lazily if this cell was previously empty.
    AttributeValue attributeValue = dynamoDbItem.get(attributeName);
    if ( attributeValue == null ) {
        attributeValue = new AttributeValue();
        dynamoDbItem.put(attributeName, attributeValue);
    }

    setAttribute(attributeValue, newValue, dataType);

    // Track the edited item by its key so repeated edits to the same item
    // coalesce into one EditedItem entry.
    Map<String, AttributeValue> editedItemKey = getKey(item);
    if ( !editedItems.containsKey(editedItemKey) ) {
        editedItems.add(editedItemKey, new EditedItem(dynamoDbItem, item));
    }

    // Don't add key attributes to the list of edited attributes
    if ( !attributeName.equals(tableKey.getHashKeyAttributeName())
            && ( !tableKey.hasRangeKey() || !attributeName.equals(tableKey.getRangeKeyAttributeName())) ) {
        editedItems.get(editedItemKey).markAttributeEdited(attributeName);
    }

    // We may already have another entry here, but since we're updating the
    // data model as we go, we can overwrite as many times as we want.
    editedItems.update(editedItemKey, dynamoDbItem);

    item.setText(column, format(attributeValue));

    // Treat the empty string as a null for easier saving
    if ( newValue.size() == 1 && newValue.iterator().next().length() == 0 ) {
        dynamoDbItem.remove(attributeName);
    }

    // Highlight the modified cell (and the active inline editor) in red.
    item.setForeground(column, Display.getDefault().getSystemColor(SWT.COLOR_RED));
    if ( editorControl != null )
        editorControl.setForeground(Display.getDefault().getSystemColor(SWT.COLOR_RED));
    markDirty();
}
 
Example 10
Project: elasticsearch-osem   File: MappingProcessor.java View source code 6 votes vote down vote up
/**
 * Translate an {@code @IndexableProperties} (multi-field) annotation on the
 * given field or method into its mapping entry and add that entry to
 * {@code propertiesMap} under the resolved field name.
 *
 * @param accessibleObject the annotated field or method
 * @param propertiesMap    the "properties" mapping being assembled
 * @throws ElasticSearchOsemException if the annotation is missing, declares
 *         no properties, the field name cannot be resolved, or more than one
 *         nested property has an empty name
 */
private static void processIndexableProperties(AccessibleObject accessibleObject, Map<String, Object> propertiesMap) {
    IndexableProperties indexableProperties = accessibleObject.getAnnotation(IndexableProperties.class);
    if (indexableProperties == null) {
        throw new ElasticSearchOsemException("Unable to find annotation IndexableProperties");
    }
    if (indexableProperties.properties().length < 1) {
        throw new ElasticSearchOsemException("IndexableProperties must have at lease one IndexableProperty");
    }

    // Field name defaults to the Java field name and is overridden by the
    // annotation's name() when one is given.
    String fieldName = null;
    if (accessibleObject instanceof Field) {
        fieldName = ((Field) accessibleObject).getName();
    }
    if (!indexableProperties.name().isEmpty()) {
        fieldName = indexableProperties.name();
    }

    if (fieldName == null) {
        throw new ElasticSearchOsemException("Unable to find field name for IndexableProperties");
    }

    Map<String, Object> multiFieldMap = Maps.newHashMap();
    multiFieldMap.put("type", getFieldType(indexableProperties.type(), accessibleObject));

    if (indexableProperties.path() != MultiFieldPathEnum.NA) {
        multiFieldMap.put("path", indexableProperties.path().toString().toLowerCase());
    }

    // At most one nested property may have an empty name: it inherits the
    // outer field name and its mapping is merged into the top-level field;
    // all other properties become entries of the "fields" sub-map.
    boolean emptyNameProcessed = false;
    Map<String, Object> fieldsMap = Maps.newHashMap();
    for (IndexableProperty property : indexableProperties.properties()) {
        String propertyName = property.name();
        if (propertyName.isEmpty()) {
            if (!emptyNameProcessed) {
                emptyNameProcessed = true;
                propertyName = fieldName;
            } else {
                throw new ElasticSearchOsemException("Field name cannot be empty in multi-field");
            }
        }
        Map<String, Object> fieldMap = getIndexablePropertyMapping(accessibleObject, property);
        if (propertyName.equals(fieldName)) {
            multiFieldMap.putAll(fieldMap);
        } else {
            fieldsMap.put(propertyName, fieldMap);
        }
    }
    multiFieldMap.put("fields", fieldsMap);
    propertiesMap.put(fieldName, multiFieldMap);
}
 
Example 11
Project: ArubaSyslog   File: UserSessions.java View source code 6 votes vote down vote up
/**
 * Split the whole wifilogs into individual users and extract each user's
 * sessions into per-input-file output files.
 *
 * @param input  folder containing the raw syslog files
 * @param output folder the extracted sessions are written to
 * @throws IOException    if a log file cannot be read or written
 * @throws ParseException if a record cannot be parsed during session extraction
 */
  private static void splitAndExtract(String input, String output) throws IOException, ParseException{

      File[] files = Utils.getInputFiles(input);
      Utils.createFolder(output);
      Arrays.sort(files);

      for (File file : files){
          Map<String, List<String>> macRecordMap = new HashMap<String, List<String>>();
          long start = System.currentTimeMillis();
          System.out.println(start/1000 + " " + file.getName());

          System.out.println("Split syslogs into users...");
          // try-with-resources closes the reader even when a read throws
          // (the original leaked it on exception).
          try (BufferedReader iReader = new BufferedReader(new FileReader(file))) {
              String line;
              while ((line = iReader.readLine()) != null) {
                  // Records are "<mac>\t<rest>"; anything else is skipped.
                  String[] parts = line.split("\t", 2);
                  if (parts.length == 2){
                      String mac = parts[0];
                      if (!macRecordMap.containsKey(mac)){
                          macRecordMap.put(mac, new ArrayList<String>());
                      }
                      macRecordMap.get(mac).add(line);
                  }
              }
          }

          System.out.println("Extract sessions...");
          for (String umac : macRecordMap.keySet()){
              if (Utils.debug)
                  System.out.println(umac);

              // extract session and save to file
              List<String> finalLines = SessionExtraction.extractSessions(macRecordMap.get(umac));

              // Write into file. Append mode so every user's sessions land in
              // the same per-input-file output; try-with-resources closes the
              // writer even when a write throws (the original leaked it).
              try (BufferedWriter bw = new BufferedWriter(new FileWriter(FilenameUtils.concat(output, FilenameUtils.getName(file.getName())), true))) {
                  for (String l : finalLines)
                      bw.write(l + "\n");
              }
          }

          macRecordMap.clear();
          System.out.println(String.format("Elapsed time: %.3f sec",
                  (System.currentTimeMillis() - start)/1000.0));
      }
  }
 
Example 12
Project: keycloak   File: LDAPIdentityStoreRegistry.java View source code 6 votes vote down vote up
/**
 * Return the LDAP store for the given federation provider, (re)creating it
 * when no store is cached yet or the provider's configuration has changed.
 *
 * @param model federation provider model holding the LDAP configuration
 * @return the cached or freshly created LDAP identity store
 */
public LDAPIdentityStore getLdapStore(UserFederationProviderModel model) {
    LDAPIdentityStoreContext context = ldapStores.get(model.getId());

    // Ldap config might have changed for the realm. In this case, we must re-initialize
    Map<String, String> config = model.getConfig();
    boolean configChanged = context != null && !config.equals(context.config);
    if (context == null || configChanged) {
        logLDAPConfig(model.getDisplayName(), config);

        context = new LDAPIdentityStoreContext(config, createLdapIdentityStore(config));
        ldapStores.put(model.getId(), context);
    }
    return context.store;
}
 
Example 13
Project: logback-flume   File: FlumeConfigurationBuilder.java View source code 6 votes vote down vote up
/**
 * Wire up the sink groups declared in the agent configuration: each group's
 * sinks are claimed (removed) from {@code sinks}, bundled into a SinkGroup and
 * registered as one runner; any sinks left unassigned afterwards get their own
 * default (solo) runner.
 *
 * @param agentConf the parsed agent configuration
 * @param sinks     name-to-sink map; consumed (entries removed) as sinks are
 *                  assigned to groups
 * @param conf      node configuration that receives the created sink runners
 */
protected void loadSinkGroups(final FlumeConfiguration.AgentConfiguration agentConf,
                              final Map<String, Sink> sinks, final NodeConfiguration conf) {
    final Set<String> sinkgroupNames = agentConf.getSinkgroupSet();
    final Map<String, ComponentConfiguration> compMap = agentConf.getSinkGroupConfigMap();
    // Records which group each sink was assigned to (sink name -> group name),
    // used for duplicate-assignment error reporting below.
    final Map<String, String> usedSinks = new HashMap<String, String>();
    for (final String groupName : sinkgroupNames) {
        final ComponentConfiguration comp = compMap.get(groupName);
        if (comp != null) {
            final SinkGroupConfiguration groupConf = (SinkGroupConfiguration) comp;
            final List<String> groupSinkList = groupConf.getSinks();
            final List<Sink> groupSinks = new ArrayList<Sink>();
            for (final String sink : groupSinkList) {
                // remove() doubles as the "claim" of the sink for this group;
                // a null here means it is missing or already claimed.
                final Sink s = sinks.remove(sink);
                if (s == null) {
                    final String sinkUser = usedSinks.get(sink);
                    if (sinkUser != null) {
                        throw new ConfigurationException(String.format(
                            "Sink %s of group %s already in use by group %s", sink, groupName, sinkUser));
                    } else {
                        throw new ConfigurationException(String.format(
                            "Sink %s of group %s does not exist or is not properly configured", sink,
                            groupName));
                    }
                }
                groupSinks.add(s);
                usedSinks.put(sink, groupName);
            }
            final SinkGroup group = new SinkGroup(groupSinks);
            Configurables.configure(group, groupConf);
            conf.getSinkRunners().put(comp.getComponentName(), new SinkRunner(group.getProcessor()));
        }
    }
    // add any unassigned sinks to solo collectors
    // NOTE(review): containsValue compares the remaining sink name against the
    // *group* names stored as values in usedSinks; containsKey looks intended.
    // Since sinks still present in `sinks` were never claimed, this check is
    // effectively always false in practice — verify against upstream.
    for (final Map.Entry<String, Sink> entry : sinks.entrySet()) {
        if (!usedSinks.containsValue(entry.getKey())) {
            final SinkProcessor pr = new DefaultSinkProcessor();
            final List<Sink> sinkMap = new ArrayList<Sink>();
            sinkMap.add(entry.getValue());
            pr.setSinks(sinkMap);
            Configurables.configure(pr, new Context());
            conf.getSinkRunners().put(entry.getKey(), new SinkRunner(pr));
        }
    }
}
 
Example 14
Project: divolte-collector   File: DslRecordMapperTest.java View source code 6 votes vote down vote up
// End-to-end test: capture a real HttpServerExchange via a local server, map
// it with a mocked ip2geo lookup service, and compare every mapped geo field
// against a JSON fixture of expected values.
@Test
public void shouldMapAllGeoIpFields() throws IOException, InterruptedException, ClosedServiceException {
    /*
     * Have to work around not being able to create a HttpServerExchange a bit.
     * We setup a actual server just to do a request and capture the HttpServerExchange
     * instance. Then we setup a DslRecordMapper instance with a mock ip2geo lookup service,
     * we then use the previously captured exchange object against our locally created mapper
     * instance to test the ip2geo mapping (using the a mock lookup service).
     */
    setupServer("minimal-mapping.groovy");
    final EventPayload payload = request("http://www.example.com");

    // The mapping script must be loaded from a file, so copy the classpath
    // resource into a temp file (deleted at the end of the test).
    final File geoMappingFile = File.createTempFile("geo-mapping", ".groovy");
    copyResourceToFile("geo-mapping.groovy", geoMappingFile);

    final ImmutableMap<String, Object> mappingConfig = ImmutableMap.of(
            "divolte.tracking.schema_mapping.mapping_script_file", geoMappingFile.getAbsolutePath(),
            "divolte.tracking.schema_file", avroFile.getAbsolutePath()
            );

    final Config geoConfig = ConfigFactory.parseMap(mappingConfig)
        .withFallback(ConfigFactory.parseResources("dsl-mapping-test.conf"))
        .withFallback(ConfigFactory.parseResources("reference-test.conf"));
    final ValidatedConfiguration vc = new ValidatedConfiguration(() -> geoConfig);

    // Fixtures: a city response with every field populated, and the mapping
    // result we expect it to produce.
    final CityResponse mockResponseWithEverything = loadFromClassPath("/city-response-with-everything.json", new TypeReference<CityResponse>(){});
    final Map<String,Object> expectedMapping = loadFromClassPath("/city-response-expected-mapping.json", new TypeReference<Map<String,Object>>(){});

    final LookupService mockLookupService = mock(LookupService.class);
    when(mockLookupService.lookup(any())).thenReturn(Optional.of(mockResponseWithEverything));

    final DslRecordMapper mapper = new DslRecordMapper(
            vc,
            new Schema.Parser().parse(Resources.toString(Resources.getResource("TestRecord.avsc"), StandardCharsets.UTF_8)),
            Optional.of(mockLookupService));

    final GenericRecord record = mapper.newRecordFromExchange(payload.event);

    // Validate the results.
    verify(mockLookupService).lookup(any());
    verifyNoMoreInteractions(mockLookupService);
    expectedMapping.forEach((k, v) -> {
        final Object recordValue = record.get(k);
        assertEquals("Property " + k + " not mapped correctly.", v, recordValue);
    });

    Files.delete(geoMappingFile.toPath());
}
 
Example 15
Project: btpka3.github.com   File: Util.java View source code 6 votes vote down vote up
/**
 * Render a short human-readable summary of the given scheduler.
 *
 * @param scheduler the scheduler to describe
 * @return scheduler name, instance id and calendar names as a map-style string
 * @throws SchedulerException if the scheduler cannot be queried
 */
public static String printScheduler(Scheduler scheduler)
        throws SchedulerException {

    // LinkedHashMap keeps the fields in insertion order for stable output.
    Map<String, Object> summary = new LinkedHashMap<String, Object>();
    summary.put("schedulerName", scheduler.getSchedulerName());
    summary.put("schedulerInstanceId", scheduler.getSchedulerInstanceId());
    summary.put("calendarNames", scheduler.getCalendarNames().toString());

    return summary.toString();
}
 
Example 16
Project: sisu-guice   File: ProxyFactory.java View source code 6 votes vote down vote up
/**
 * Maps each (wrapped) method to its position in the given list so callbacks
 * can later be selected by index.
 */
IndicesCallbackFilter(List<Method> methods) {
  final Map<Object, Integer> methodIndices = Maps.newHashMap();
  int position = 0;
  for (Method method : methods) {
    methodIndices.put(MethodWrapper.create(method), position++);
  }
  this.indices = methodIndices;
  this.hashCode = methodIndices.hashCode();
}
 
Example 17
Project: jboss-dmr   File: ObjectModelValue.java View source code 6 votes vote down vote up
// Recursively protects every child node, then returns an unmodifiable copy
// when the backing map is ordered (LinkedHashMap), otherwise this instance.
@Override
ModelValue protect() {
    final Map<String, ModelNode> snapshot = this.map;
    for (final ModelNode child : snapshot.values()) {
        child.protect();
    }
    if (snapshot.getClass() == LinkedHashMap.class) {
        return new ObjectModelValue(Collections.unmodifiableMap(snapshot));
    }
    return this;
}
 
Example 18
Project: jena   File: TextQueryPF.java View source code 6 votes vote down vote up
// Executes the text-search property function against the configured text
// index. When no index is attached this degrades to a pass-through no-op.
@Override
public QueryIterator exec(QueryIterator input, PropFuncArg argSubject, Node predicate, PropFuncArg argObject, ExecutionContext execCxt) {
    if (textIndex == null) {
        if (!warningIssued) {
            log.warn("No text index - no text search performed") ;
            // Warn only once per property-function instance.
            warningIssued = true ;
        }
        // Not a text dataset - no-op
        return input ;
    }
    
    // If the search term is already bound (i.e. it is not dynamically specified by the input QueryIterator), then we can issue the query once and cache the result
    Map<String,TextHit> textResults = null;
    StrMatch match = objectToStruct(argObject, execCxt, false);
    if (null != match) {
        List<TextHit> hits = query(match.getProperty(), match.getQueryString(), match.getLimit(), execCxt);
        // Keyed by subject URI; putIfAbsent keeps the first hit returned per
        // subject, and LinkedHashMap preserves the order the hits came back in.
        textResults = new LinkedHashMap<String,TextHit>();
        for (TextHit hit : hits) {
            textResults.putIfAbsent(hit.getNode().getURI(), hit);
        }
    }
    
    // textResults may be null here: the iterator then issues the query per binding.
    return new RepeatApplyIteratorTextQuery(input, argSubject, predicate, argObject, execCxt, textResults) ;
}
 
Example 19
Project: divolte-collector   File: DslRecordMapping.java View source code 6 votes vote down vote up
/**
 * Produce the value for this mapping, optionally memoizing it in the
 * per-event context so the supplier runs at most once per event.
 *
 * @param divolteEvent the event being mapped
 * @param context      per-event cache of already-produced values
 * @return the produced value, possibly served from the context cache
 */
final Optional<T> produce(final DivolteEvent divolteEvent,
                          final Map<String,Optional<?>> context) {
    if (!memoize) {
        return supplier.apply(divolteEvent, context);
    }
    @SuppressWarnings("unchecked")
    final Optional<T> cached =
            (Optional<T>) context.computeIfAbsent(identifier, (x) -> supplier.apply(divolteEvent, context));
    return cached;
}
 
Example 20
Project: EE3Helper   File: CommandAddItem.java View source code 6 votes vote down vote up
/**
 * Assign an energy value to the named item (at the given damage value),
 * broadcast the change to all connected clients, and persist the updated
 * value table.
 *
 * @param name        registry name of the item; ignored if unknown
 * @param damageValue item damage/metadata to target
 * @param value       energy value to assign; must be positive to take effect
 */
private void addItem(String name, int damageValue, float value)
{
	if(!registryNames.containsKey(name))
		return;

	ItemStack iStack = new ItemStack((Item) registryNames.getObject(name));
	Map<WrappedStack, EnergyValue> valuesPre = Helper.loadPre();

	iStack.setItemDamage(damageValue);

	WrappedStack w = WrappedStack.wrap(iStack);
	EnergyValue e = new EnergyValue(value);

	// 'e' is freshly constructed and can never be null; only the wrap result
	// needs a null check. Map.put already replaces an existing mapping, so
	// the original containsKey/replace/put branching collapses to one put.
	if (w != null && Float.compare(e.getValue(), 0) > 0)
	{
		valuesPre.put(w, e);
		PacketHandler.INSTANCE.sendToAll(new MessageSetEnergyValue(w, e));
	}

	EnergyValueRegistry.getInstance().setShouldRegenNextRestart(true);
	Helper.savePre(valuesPre);

	if(EE3Help.config.auto_oredict)
	{
		CommandAddOreRange.addOreRange(name, damageValue, value);
	}
}
 
Example 21
Project: divolte-collector   File: QueryStringParser.java View source code 6 votes vote down vote up
/**
 * Append {@code value} to the list stored under {@code key}, creating a new
 * single-element list when the key is not present yet.
 */
private static void mergeIntoMap(final Map<String,List<String>> map, final String key, final String value) {
    final List<String> existing = map.get(key);
    if (existing == null) {
        map.put(key, Lists.newArrayList(value));
    } else {
        existing.add(value);
    }
}
 
Example 22
Project: java8-tutorial   File: Maps1.java View source code 6 votes vote down vote up
/**
 * Walk-through of the Java 8 additions to the Map interface: putIfAbsent,
 * forEach, computeIfPresent/computeIfAbsent, getOrDefault, the two-argument
 * remove, and merge. Expected console output is noted in trailing comments.
 */
public static void main(String[] args) {
    Map<Integer, String> map = new HashMap<>();

    // Seed the map with val0..val9.
    int i = 0;
    while (i < 10) {
        map.putIfAbsent(i, "val" + i);
        i++;
    }

    map.forEach((id, val) -> System.out.println(val));


    map.computeIfPresent(3, (key, val) -> val + key);
    System.out.println(map.get(3));             // val33

    // Returning null from computeIfPresent removes the entry.
    map.computeIfPresent(9, (key, val) -> null);
    System.out.println(map.containsKey(9));     // false

    map.computeIfAbsent(23, key -> "val" + key);
    System.out.println(map.containsKey(23));    // true

    // Key 3 is present, so the mapping function is not applied.
    map.computeIfAbsent(3, key -> "bam");
    System.out.println(map.get(3));             // val33

    System.out.println(map.getOrDefault(42, "not found"));      // not found

    // The two-argument remove only removes when the current value matches.
    map.remove(3, "val3");
    System.out.println(map.get(3));             // val33

    map.remove(3, "val33");
    System.out.println(map.get(3));             // null

    map.merge(9, "val9", String::concat);
    System.out.println(map.get(9));             // val9

    map.merge(9, "concat", String::concat);
    System.out.println(map.get(9));             // val9concat
}
 
Example 23
Project: elasticsearch-osem   File: MappingProcessor.java View source code 5 votes vote down vote up
/**
 * Build the class-level (root object) mapping for an {@code @Indexable}
 * class: analyzers, date/numeric detection and all supported meta-fields
 * (_parent, _id, _type, _source, _all, _analyzer, _boost, _routing, _index,
 * _size, _timestamp, _ttl). Only non-default settings are emitted; meta-field
 * sub-maps are attached only when they collected at least one entry.
 *
 * @param clazz the class annotated with {@code @Indexable}
 * @return mutable map with the root mapping settings for the class
 * @throws ElasticSearchOsemException if the class is not annotated {@code @Indexable}
 */
private static Map<String, Object> getIndexableMap(Class clazz) {
    Map<String, Object> objectMap = Maps.newHashMap();

    Indexable indexable = (Indexable) clazz.getAnnotation(Indexable.class);
    if (indexable == null) {
        throw new ElasticSearchOsemException("Class " + clazz.getName() + " is not Indexable");
    }

    if (!indexable.indexAnalyzer().isEmpty()) {
        objectMap.put("index_analyzer", indexable.indexAnalyzer());
    }

    if (!indexable.searchAnalyzer().isEmpty()) {
        objectMap.put("search_analyzer", indexable.searchAnalyzer());
    }

    if (indexable.dynamicDateFormats().length > 0) {
        objectMap.put("dynamic_date_formats", Lists.newArrayList(indexable.dynamicDateFormats()));
    }

    // NA means "not specified" for the detection enums; only explicit
    // true/false values are written to the mapping.
    if (!indexable.dateDetection().equals(DateDetectionEnum.NA)) {
        objectMap.put("date_detection", Boolean.valueOf(indexable.dateDetection().toString()));
    }

    if (!indexable.numericDetection().equals(NumericDetectionEnum.NA)) {
        objectMap.put("numeric_detection", Boolean.valueOf(indexable.numericDetection().toString()));
    }

    // handle _parent
    if (indexable.parentClass() != void.class) {
        Map<String, Object> parentMap = Maps.newHashMap();
        parentMap.put("type", getIndexTypeName(indexable.parentClass()));
        objectMap.put("_parent", parentMap);
    }

    // handle _id
    Field indexableIdField = OsemReflectionUtils.getIdField(clazz);
    putIfNotEmpty(objectMap, "_id", getIndexableIdMap(indexableIdField));

    // handle _type
    Map<String, Object> typeMap = Maps.newHashMap();
    if (indexable.typeFieldStore()) {
        typeMap.put("store", "yes");
    }
    if (!indexable.typeFieldIndex().equals(IndexEnum.NA)) {
        typeMap.put("index", indexable.typeFieldIndex().toString().toLowerCase());
    }
    putIfNotEmpty(objectMap, "_type", typeMap);

    // handle _source
    Map<String, Object> sourceMap = Maps.newHashMap();
    if (!indexable.sourceFieldEnabled()) {
        sourceMap.put("enabled", Boolean.FALSE);
    }
    if (indexable.sourceFieldCompress()) {
        sourceMap.put("compress", Boolean.TRUE);
    }
    if (!indexable.sourceFieldCompressThreshold().isEmpty()) {
        sourceMap.put("compress_threshold", indexable.sourceFieldCompressThreshold());
    }
    if (indexable.sourceFieldIncludes().length > 0) {
        sourceMap.put("includes", Lists.newArrayList(indexable.sourceFieldIncludes()));
    }
    if (indexable.sourceFieldExcludes().length > 0) {
        sourceMap.put("excludes", Lists.newArrayList(indexable.sourceFieldExcludes()));
    }
    putIfNotEmpty(objectMap, "_source", sourceMap);

    // handle _all
    Map<String, Object> allMap = Maps.newHashMap();
    if (!indexable.allFieldEnabled()) {
        allMap.put("enabled", Boolean.FALSE);
    }
    if (indexable.allFieldStore()) {
        allMap.put("store", "yes");
    }
    if (!indexable.allFieldTermVector().equals(TermVectorEnum.NA)) {
        allMap.put("term_vector", indexable.allFieldTermVector().toString().toLowerCase());
    }
    if (!indexable.allFieldAnalyzer().isEmpty()) {
        allMap.put("analyzer", indexable.allFieldAnalyzer());
    }
    if (!indexable.allFieldIndexAnalyzer().isEmpty()) {
        allMap.put("index_analyzer", indexable.allFieldIndexAnalyzer());
    }
    if (!indexable.allFieldSearchAnalyzer().isEmpty()) {
        allMap.put("search_analyzer", indexable.allFieldSearchAnalyzer());
    }
    putIfNotEmpty(objectMap, "_all", allMap);

    // handle _analyzer
    Map<String, Object> analyzerMap = Maps.newHashMap();
    if (!indexable.analyzerFieldPath().isEmpty()) {
        analyzerMap.put("path", indexable.analyzerFieldPath());
    }
    putIfNotEmpty(objectMap, "_analyzer", analyzerMap);

    // handle _boost
    Map<String, Object> boostMap = Maps.newHashMap();
    if (!indexable.boostFieldName().isEmpty()) {
        boostMap.put("name", indexable.boostFieldName());
    }
    // Double.MIN_VALUE is the annotation's "unset" sentinel.
    if (indexable.boostFieldNullValue() != Double.MIN_VALUE) {
        boostMap.put("null_value", indexable.boostFieldNullValue());
    }
    putIfNotEmpty(objectMap, "_boost", boostMap);

    // handle _routing
    Map<String, Object> routingMap = Maps.newHashMap();
    if (!indexable.routingFieldStore()) {
        routingMap.put("store", "no");
    }
    if (!indexable.routingFieldIndex().equals(IndexEnum.NA)) {
        routingMap.put("index", indexable.routingFieldIndex().toString().toLowerCase());
    }
    if (indexable.routingFieldRequired()) {
        routingMap.put("required", Boolean.TRUE);
    }
    if (!indexable.routingFieldPath().isEmpty()) {
        routingMap.put("path", indexable.routingFieldPath());
    }
    putIfNotEmpty(objectMap, "_routing", routingMap);

    // handle _index
    Map<String, Object> indexMap = Maps.newHashMap();
    if (indexable.indexFieldEnabled()) {
        indexMap.put("enabled", Boolean.TRUE);
    }
    putIfNotEmpty(objectMap, "_index", indexMap);

    // handle _size
    Map<String, Object> sizeMap = Maps.newHashMap();
    if (indexable.sizeFieldEnabled()) {
        sizeMap.put("enabled", Boolean.TRUE);
    }
    if (indexable.sizeFieldStore()) {
        sizeMap.put("store", "yes");
    }
    putIfNotEmpty(objectMap, "_size", sizeMap);

    // handle _timestamp
    Map<String, Object> timestampMap = Maps.newHashMap();
    if (indexable.timestampFieldEnabled()) {
        timestampMap.put("enabled", Boolean.TRUE);
    }
    if (indexable.timestampFieldStore()) {
        timestampMap.put("store", "yes");
    }
    if (!indexable.timestampFieldIndex().equals(IndexEnum.NA)) {
        timestampMap.put("index", indexable.timestampFieldIndex().toString().toLowerCase());
    }
    if (!indexable.timestampFieldPath().isEmpty()) {
        timestampMap.put("path", indexable.timestampFieldPath());
    }
    if (!indexable.timestampFieldFormat().isEmpty()) {
        timestampMap.put("format", indexable.timestampFieldFormat());
    }
    putIfNotEmpty(objectMap, "_timestamp", timestampMap);

    // handle _ttl
    Map<String, Object> ttlMap = Maps.newHashMap();
    if (indexable.ttlFieldEnabled()) {
        ttlMap.put("enabled", Boolean.TRUE);
    }
    if (!indexable.ttlFieldStore()) {
        ttlMap.put("store", "no");
    }
    if (!indexable.ttlFieldIndex().equals(IndexEnum.NA)) {
        ttlMap.put("index", indexable.ttlFieldIndex().toString().toLowerCase());
    }
    if (!indexable.ttlFieldDefault().isEmpty()) {
        ttlMap.put("default", indexable.ttlFieldDefault());
    }
    putIfNotEmpty(objectMap, "_ttl", ttlMap);

    return objectMap;
}

/** Attach {@code value} under {@code key} only when it has at least one entry. */
private static void putIfNotEmpty(Map<String, Object> target, String key, Map<String, Object> value) {
    if (!value.isEmpty()) {
        target.put(key, value);
    }
}
 
Example 24
Project: jenetics   File: DieHarder.java View source code 5 votes vote down vote up
/**
 * Run the 'dieharder' random-number test battery against the given Random
 * implementation and print a summary of the results.
 *
 * @param randomName fully qualified class name of the Random to test; must
 *                   have a public no-arg constructor
 * @param args       additional command line arguments passed to dieharder
 * @throws IOException          if the dieharder process cannot be started or read
 * @throws InterruptedException if waiting for the process is interrupted
 */
private static void test(final String randomName, final Seq<String> args)
	throws IOException, InterruptedException
{
	final Random random;
	try {
		random = (Random)Class.forName(randomName).newInstance();
		printt(
			"Testing: %s (%s)",
			randomName,
			new SimpleDateFormat("yyyy-MM-dd HH:mm").format(new Date())
		);
	} catch (Exception e) {
		// Best effort: report and bail out rather than aborting the whole run.
		println("Can't create random class '%s'.", randomName);
		return;
	}

	// "-g 200" selects the stdin generator: dieharder reads raw random
	// bytes from its standard input.
	final List<String> dieharderArgs = new ArrayList<>();
	dieharderArgs.add("dieharder");
	dieharderArgs.addAll(args.asList());
	dieharderArgs.add("-g");
	dieharderArgs.add("200");

	printv();

	final long start = System.currentTimeMillis();
	final ProcessBuilder builder = new ProcessBuilder(dieharderArgs);
	final Process dieharder = builder.start();

	// Feed the generator's output into dieharder's stdin on its own thread.
	final Randomizer randomizer = new Randomizer(
		random,
		dieharder.getOutputStream()
	);
	final Thread randomizerThread = new Thread(randomizer);
	randomizerThread.start();

	// The dieharder console output.
	final BufferedReader stdout = new BufferedReader (
		new InputStreamReader(dieharder.getInputStream())
	);

	// Echo dieharder's output line by line and collect the per-test results.
	final List<Result> results = new ArrayList<>();
	for (String l = stdout.readLine(); l != null; l = stdout.readLine()) {
		Result.parse(l).ifPresent(results::add);
		System.out.println(l);
	}

	dieharder.waitFor();
	// Stop the feeder thread, which is likely blocked writing to the now
	// closed pipe.
	randomizerThread.interrupt();

	final long millis = System.currentTimeMillis() - start;
	final long sec = millis/1000;
	final double megaBytes = randomizer.getCount()/(1024.0*1024.0);

	// Calculate statistics.
	final Map<Assessment, Long> grouped = results.stream()
		.collect(groupingBy(r -> r.assessment, counting()));

	final long passed = grouped.getOrDefault(Assessment.PASSED, 0L);
	final long weak = grouped.getOrDefault(Assessment.WEAK, 0L);
	final long failed = grouped.getOrDefault(Assessment.FAILED, 0L);

	final NumberFormat formatter = NumberFormat.getIntegerInstance();
	formatter.setMinimumFractionDigits(3);
	formatter.setMaximumFractionDigits(3);

	println("#=============================================================================#");
	println(
		"# %-76s#",
		format("Summary: PASSED=%d, WEAK=%d, FAILED=%d", passed, weak, failed)
	);
	println(
		"# %-76s#",
		format("         %s MB of random data created with %s MB/sec",
			formatter.format(megaBytes),
			formatter.format(megaBytes/(millis/1000.0))
		)
	);
	println("#=============================================================================#");
	printt("Runtime: %d:%02d:%02d", sec/3600, (sec%3600)/60, sec%60);

}