org.elasticsearch.common.xcontent.support.XContentMapValues Java Examples

The following examples show how to use org.elasticsearch.common.xcontent.support.XContentMapValues. They are drawn from a variety of open-source projects; the source file, project, and license are noted above each example.
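Most of the examples below rely on a handful of static helpers from this class: extractValue navigates nested maps by dotted path, the node*Value methods coerce a node to a target type with a fallback default, and isArray distinguishes list-valued nodes from scalars. The following is a minimal, self-contained sketch of those calls; the settings map, keys, and values are hypothetical, and only the XContentMapValues calls mirror the usage shown in the examples that follow.

import org.elasticsearch.common.xcontent.support.XContentMapValues;

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class XContentMapValuesSketch {

    public static void main(String[] args) {
        // Hypothetical nested settings, similar in shape to the river settings parsed below.
        Map<String, Object> kafka = new HashMap<>();
        kafka.put("topic", "tweets");
        kafka.put("bulk_size", 500);
        kafka.put("brokers", Arrays.asList("broker1:9092", "broker2:9092"));

        Map<String, Object> settings = new HashMap<>();
        settings.put("kafka", kafka);

        // Dotted-path navigation into nested maps; returns null if the path is missing.
        Object topicNode = XContentMapValues.extractValue("kafka.topic", settings);

        // Type-coercing helpers that fall back to a default when the node is null.
        String topic = XContentMapValues.nodeStringValue(topicNode, "default_topic");
        int bulkSize = XContentMapValues.nodeIntegerValue(XContentMapValues.extractValue("kafka.bulk_size", settings), 100);
        boolean secure = XContentMapValues.nodeBooleanValue(XContentMapValues.extractValue("kafka.secure", settings), false);

        // isArray() tells list-valued settings apart from scalar ones.
        Object brokersNode = XContentMapValues.extractValue("kafka.brokers", settings);
        if (XContentMapValues.isArray(brokersNode)) {
            System.out.println("brokers: " + brokersNode);
        }

        System.out.println(topic + " / " + bulkSize + " / " + secure);
    }
}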
Example #1
Source File: FieldReadCallback.java    From deprecated-security-advanced-modules with Apache License 2.0
public FieldReadCallback(final ThreadContext threadContext, final IndexService indexService,
        final ClusterService clusterService, final ComplianceConfig complianceConfig, final AuditLog auditLog,
        final Set<String> maskedFields, ShardId shardId) {
    super();
    //this.threadContext = Objects.requireNonNull(threadContext);
    //this.clusterService = Objects.requireNonNull(clusterService);
    this.index = Objects.requireNonNull(indexService).index();
    this.complianceConfig = complianceConfig;
    this.auditLog = auditLog;
    this.maskedFields = maskedFields;
    this.shardId = shardId;
    try {
        sfc = (SourceFieldsContext) HeaderHelper.deserializeSafeFromHeader(threadContext, "_opendistro_security_source_field_context");
        if(sfc != null && sfc.hasIncludesOrExcludes()) {
            if(log.isTraceEnabled()) {
                log.trace("_opendistro_security_source_field_context: "+sfc);
            }

            filterFunction = XContentMapValues.filter(sfc.getIncludes(), sfc.getExcludes());
        }
    } catch (Exception e) {
        if(log.isDebugEnabled()) {
            log.debug("Cannot deserialize _opendistro_security_source_field_context because of {}", e.toString());
        }
    }
}
 
Example #2
Source File: S3River.java    From es-amazon-s3-river with Apache License 2.0
private boolean isStarted(){
   // Refresh index before querying it.
   client.admin().indices().prepareRefresh("_river").execute().actionGet();
   GetResponse isStartedGetResponse = client.prepareGet("_river", riverName().name(), "_s3status").execute().actionGet();
   try{
      if (!isStartedGetResponse.isExists()){
         XContentBuilder xb = jsonBuilder().startObject()
               .startObject("amazon-s3")
                  .field("feedname", feedDefinition.getFeedname())
                  .field("status", "STARTED").endObject()
               .endObject();
         client.prepareIndex("_river", riverName.name(), "_s3status").setSource(xb).execute();
         return true;
      } else {
         String status = (String)XContentMapValues.extractValue("amazon-s3.status", isStartedGetResponse.getSourceAsMap());
         if ("STOPPED".equals(status)){
            return false;
         }
      }
   } catch (Exception e){
      logger.warn("failed to get status for " + riverName().name() + ", throttling....", e);
   }
   return true;
}
 
Example #3
Source File: S3RiverUtil.java    From es-amazon-s3-river with Apache License 2.0
/**
 * Extract array from settings (either an array or a comma-delimited String)
 * @param settings Settings
 * @param path Path to settings definition
 * @return Array of settings
 */
@SuppressWarnings("unchecked")
public static String[] buildArrayFromSettings(Map<String, Object> settings, String path){
   String[] includes;

   // We manage comma separated format and arrays
   if (XContentMapValues.isArray(XContentMapValues.extractValue(path, settings))) {
      List<String> includesarray = (List<String>) XContentMapValues.extractValue(path, settings);
      int i = 0;
      includes = new String[includesarray.size()];
      for (String include : includesarray) {
         includes[i++] = trimAllWhitespace(include);
      }
   } else {
      String includedef = (String) XContentMapValues.extractValue(path, settings);
      includes = Strings.commaDelimitedListToStringArray(trimAllWhitespace(includedef));
   }
   
   String[] uniquelist = removeDuplicateStrings(includes);
   
   return uniquelist;
}
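
As a quick illustration of the helper above, here is a hypothetical call; the nested "amazon-s3" layout mirrors what Example #20 passes in, and both input forms resolve to the same trimmed, de-duplicated array.

Map<String, Object> feed = new HashMap<>();
feed.put("includes", "*.doc , *.pdf, *.doc");              // comma-delimited form...
// feed.put("includes", Arrays.asList("*.doc", "*.pdf"));  // ...or the array form

Map<String, Object> settings = new HashMap<>();
settings.put("amazon-s3", feed);

String[] includes = S3RiverUtil.buildArrayFromSettings(settings, "amazon-s3.includes");
System.out.println(Arrays.toString(includes));             // [*.doc, *.pdf]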
 
Example #4
Source File: StandardnumberMapper.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0
@SuppressWarnings({"unchecked", "rawtypes"})
@Override
public Mapper.Builder parse(String name, Map<String, Object> mapping, ParserContext parserContext) {
    StandardnumberMapper.Builder builder = new Builder(name, service);
    Iterator<Map.Entry<String, Object>> iterator = mapping.entrySet().iterator();
    while (iterator.hasNext()) {
        Map.Entry<String, Object> entry = iterator.next();
        String fieldName = entry.getKey();
        Object fieldNode = entry.getValue();
        switch (fieldName) {
            case "standardnumbers" :
                builder.settingsBuilder.putList("standardnumbers", XContentMapValues.nodeStringArrayValue(fieldNode));
                iterator.remove();
                break;
            default:
                break;
        }
    }
    return builder;
}
 
Example #5
Source File: GitHubRiver.java    From elasticsearch-river-github with Apache License 2.0
@SuppressWarnings({"unchecked"})
@Inject
public GitHubRiver(RiverName riverName, RiverSettings settings, Client client) {
    super(riverName, settings);
    this.client = client;

    if (!settings.settings().containsKey("github")) {
        throw new IllegalArgumentException("Need river settings - owner and repository.");
    }

    // get settings
    Map<String, Object> githubSettings = (Map<String, Object>) settings.settings().get("github");
    owner = XContentMapValues.nodeStringValue(githubSettings.get("owner"), null);
    repository = XContentMapValues.nodeStringValue(githubSettings.get("repository"), null);

    index = String.format("%s&%s", owner, repository);
    userRequestedInterval = XContentMapValues.nodeIntegerValue(githubSettings.get("interval"), 60);

    // auth (optional)
    username = null;
    password = null;
    if (githubSettings.containsKey("authentication")) {
        Map<String, Object> auth = (Map<String, Object>) githubSettings.get("authentication");
        username = XContentMapValues.nodeStringValue(auth.get("username"), null);
        password = XContentMapValues.nodeStringValue(auth.get("password"), null);
    }

    // endpoint (optional - default to github.com)
    endpoint = XContentMapValues.nodeStringValue(githubSettings.get("endpoint"), "https://api.github.com");

    logger.info("Created GitHub river.");
}
 
Example #6
Source File: HBaseRiver.java    From Elasticsearch-HBase-River with Apache License 2.0
/**
 * Fetch the value of a configuration that has a default value and is therefore optional.
 * 
 * @param config Key of the configuration to fetch
 * @param defaultValue The value to set if no value could be found
 * @return The configured value, or the supplied default if the key is not present
 */
@SuppressWarnings({ "unchecked" })
private String readConfig(final String config, final String defaultValue) {
	if (this.settings.settings().containsKey(CONFIG_SPACE)) {
		Map<String, Object> mysqlSettings = (Map<String, Object>) this.settings.settings().get(CONFIG_SPACE);
		return XContentMapValues.nodeStringValue(mysqlSettings.get(config), defaultValue);
	}
	return defaultValue;
}
 
Example #7
Source File: Neo4jDriver.java    From elasticsearch-river-neo4j with Apache License 2.0
@Inject
public Neo4jDriver(RiverName riverName, RiverSettings settings, @RiverIndexName final String riverIndexName, final Client client) {
    super(riverName, settings);
    this.client = client;

    uri = XContentMapValues.nodeStringValue(XContentMapValues.extractValue("neo4j.uri", settings.settings()), DEFAULT_NEO_URI);
    List<Object> neo4jLabels = XContentMapValues.extractRawValues("neo4j.labels", settings.settings());
    String label;
    if(XContentMapValues.isArray(neo4jLabels)) {
        for (Object neo4jLabel : neo4jLabels) {
            label = XContentMapValues.nodeStringValue(neo4jLabel, null);
            labels.add(DynamicLabel.label(label));
        }
    }
    timestampField = XContentMapValues.nodeStringValue(XContentMapValues.extractValue("neo4j.timestampField", settings.settings()), DEFAULT_NEO_TIMESTAMP_FIELD);
    interval = XContentMapValues.nodeIntegerValue(XContentMapValues.extractValue("neo4j.interval", settings.settings()), DEFAULT_NEO_INTERVAL);
    index = XContentMapValues.nodeStringValue(XContentMapValues.extractValue("index.name", settings.settings()), DEFAULT_NEO_INDEX);
    type = XContentMapValues.nodeStringValue(XContentMapValues.extractValue("index.type", settings.settings()), DEFAULT_NEO_TYPE);
    indexFromLabel = XContentMapValues.nodeStringValue(XContentMapValues.extractValue("index.name.label",
        settings.settings()), null);
    typeFromLabel = XContentMapValues.nodeStringValue(XContentMapValues.extractValue("index.type.label",
        settings.settings()), null);

    logger.debug("Neo4j settings [uri={}]", new Object[]{uri});
    logger.debug("River settings [indexName={}, type={}, interval={}, timestampField={}, indexLabel={}, " +
            "typelabel={}]",
        new Object[]{index,
            type,
            interval,
            timestampField,
            indexFromLabel,
            typeFromLabel}
    );

}
 
Example #8
Source File: IMAPImporter.java    From elasticsearch-imap with Apache License 2.0
private void getUserLogins(final Map<String, Object> imapSettings) {
    String userSource = XContentMapValues.nodeStringValue(imapSettings.get("user_source"), null);
    ILoginSource source = null;

    if ("ldap".equals(userSource)) {
        //master user credentials for Dovecot
        String masterUser = XContentMapValues.nodeStringValue(imapSettings.get("master_user"), null);
        String masterPassword = XContentMapValues.nodeStringValue(imapSettings.get("master_password"), null);
        source = new LdapLoginSource(imapSettings, masterUser, masterPassword);
    } else {
        //read logins directly
        String _user = XContentMapValues.nodeStringValue(imapSettings.get("user"), null);
        String _password = XContentMapValues.nodeStringValue(imapSettings.get("password"), null);

        if (_user != null && !_user.isEmpty()) {
            users.add(_user);
            passwords.add(_password);
        }

        List<String> _users = arrayNodeToList(imapSettings.get("users"));
        List<String> _passwords = arrayNodeToList(imapSettings.get("passwords"));

        //TODO: inject master user credentials?
        if (_users != null && !_users.isEmpty()) {
            users.addAll(_users);
            passwords.addAll(_passwords);
        }
    }

    //read from generic source
    if (source != null) {
        users.addAll(source.getUserNames());
        passwords.addAll(source.getUserPasswords());
    }
}
 
Example #9
Source File: IMAPImporter.java    From elasticsearch-imap with Apache License 2.0
@SuppressWarnings("unchecked")
private List<String> arrayNodeToList(Object arrayNode) {
    ArrayList<String> list = new ArrayList<>();
    if(XContentMapValues.isArray(arrayNode)) {
        for(Object node : (List<Object>) arrayNode) {
            String value = XContentMapValues.nodeStringValue(node, null);
            if(value != null) {
                list.add(value);
            }
        }
    }
    return list;
}
 
Example #10
Source File: LdapLoginSource.java    From elasticsearch-imap with Apache License 2.0
public LdapLoginSource(Map<String, Object> settings, String masterUser, String masterPassword) {
    fUserNames = new ArrayList<>();
    fUserPasswords = new ArrayList<>();

    fMasterUser = masterUser;
    fMasterPassword = masterPassword;

    String url = XContentMapValues.nodeStringValue(settings.get("ldap_url"), null);
    String base = XContentMapValues.nodeStringValue(settings.get("ldap_base"), null);
    String user = XContentMapValues.nodeStringValue(settings.get("ldap_user"), null);
    String password = XContentMapValues.nodeStringValue(settings.get("ldap_password"), null);

    fConnector = new SimpleLdapConnector(url, base, user, password, true);

    fNameField = XContentMapValues.nodeStringValue(settings.get("ldap_name_field"), DEF_USER_NAME_FIELD);
    fPasswordField = XContentMapValues.nodeStringValue(settings.get("ldap_password_field"), DEF_USER_PW_FIELD);
    fLdapFilter = fNameField + "=*";

    fLogger = ESLoggerFactory.getLogger(LdapLoginSource.class.getName());

    fLock = new Object();
    fInitialized = false;

    //start refreshing thread once initialized; interval in minutes
    String refreshParam = XContentMapValues.nodeStringValue(settings.get("ldap_refresh_interval"), DEF_REFRESH_INT);
    fRefreshInt = Long.parseLong(refreshParam) * 60000L;
    if(fRefreshInt > 0) {
        //TODO: actually stop refreshing thread somehow
        fActive = true;
        Thread t = new Thread(this);
        t.setDaemon(true);
        t.start();
    }
}
 
Example #11
Source File: IsPrimeSearchScriptFactory.java    From elasticsearch-native-script-example with Apache License 2.0
/**
 * This method is called for every search on every shard.
 *
 * @param params list of script parameters passed with the query
 * @return new native script
 */
@Override
public ExecutableScript newScript(@Nullable Map<String, Object> params) {
    // Example of a mandatory string parameter
    // The XContentMapValues helper class can be used to simplify parameter parsing
    String fieldName = params == null ? null : XContentMapValues.nodeStringValue(params.get("field"), defaultFieldName);
    if (!Strings.hasLength(fieldName)) {
        throw new IllegalArgumentException("Missing the field parameter");
    }

    // Example of an optional integer parameter
    int certainty = params == null ? 10 : XContentMapValues.nodeIntegerValue(params.get("certainty"), 10);
    return new IsPrimeSearchScript(fieldName, certainty);
}
 
Example #12
Source File: GeoShapeFieldMapper.java    From Elasticsearch with Apache License 2.0
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
    Builder builder = geoShapeField(name);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        String fieldName = Strings.toUnderscoreCase(entry.getKey());
        Object fieldNode = entry.getValue();
        if (Names.TREE.equals(fieldName)) {
            builder.fieldType().setTree(fieldNode.toString());
            iterator.remove();
        } else if (Names.TREE_LEVELS.equals(fieldName)) {
            builder.fieldType().setTreeLevels(Integer.parseInt(fieldNode.toString()));
            iterator.remove();
        } else if (Names.TREE_PRESISION.equals(fieldName)) {
            builder.fieldType().setPrecisionInMeters(DistanceUnit.parse(fieldNode.toString(), DistanceUnit.DEFAULT, DistanceUnit.DEFAULT));
            iterator.remove();
        } else if (Names.DISTANCE_ERROR_PCT.equals(fieldName)) {
            builder.fieldType().setDistanceErrorPct(Double.parseDouble(fieldNode.toString()));
            iterator.remove();
        } else if (Names.ORIENTATION.equals(fieldName)) {
            builder.fieldType().setOrientation(ShapeBuilder.orientationFromString(fieldNode.toString()));
            iterator.remove();
        } else if (Names.STRATEGY.equals(fieldName)) {
            builder.fieldType().setStrategyName(fieldNode.toString());
            iterator.remove();
        } else if (Names.COERCE.equals(fieldName)) {
            builder.coerce(nodeBooleanValue(fieldNode));
            iterator.remove();
        } else if (Names.STRATEGY_POINTS_ONLY.equals(fieldName)
            && builder.fieldType().strategyName.equals(SpatialStrategy.TERM.getStrategyName()) == false) {
            builder.fieldType().setPointsOnly(XContentMapValues.nodeBooleanValue(fieldNode));
            iterator.remove();
        }
    }
    return builder;
}
 
Example #13
Source File: FieldAliasMapper.java    From crate with Apache License 2.0
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext)
    throws MapperParsingException {

    FieldAliasMapper.Builder builder = new FieldAliasMapper.Builder(name);
    Object pathField = node.remove(Names.PATH);
    String path = XContentMapValues.nodeStringValue(pathField, null);
    if (path == null) {
        throw new MapperParsingException("The [path] property must be specified for field [" + name + "].");
    }
    return builder.path(path);
}
 
Example #14
Source File: KeywordFieldMapper.java    From crate with Apache License 2.0
@Override
public Mapper.Builder<?,?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
    KeywordFieldMapper.Builder builder = new KeywordFieldMapper.Builder(name);
    parseField(builder, name, node, parserContext);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        String propName = entry.getKey();
        Object propNode = entry.getValue();
        if (propName.equals("null_value")) {
            if (propNode == null) {
                throw new MapperParsingException("Property [null_value] cannot be null.");
            }
            builder.nullValue(propNode.toString());
            iterator.remove();
        } else if (propName.equals("ignore_above")) {
            builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
            iterator.remove();
        } else if (propName.equals("length_limit")) {
            builder.lengthLimit(XContentMapValues.nodeIntegerValue(propNode, -1));
            iterator.remove();
        } else if (propName.equals("norms")) {
            TypeParsers.parseNorms(builder, name, propNode);
            iterator.remove();
        } else if (propName.equals("eager_global_ordinals")) {
            builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode, "eager_global_ordinals"));
            iterator.remove();
        } else if (propName.equals("normalizer")) {
            if (propNode != null) {
                builder.normalizer(parserContext.getIndexAnalyzers(), propNode.toString());
            }
            iterator.remove();
        } else if (propName.equals("split_queries_on_whitespace")) {
            builder.splitQueriesOnWhitespace(XContentMapValues.nodeBooleanValue(propNode, "split_queries_on_whitespace"));
            iterator.remove();
        }
    }
    return builder;
}
 
Example #15
Source File: TransportShardUpsertAction.java    From Elasticsearch with Apache License 2.0 5 votes vote down vote up
@Override
public Function<GetResult, Object> build(final Reference reference, SymbolToFieldExtractor.Context context) {
    return new Function<GetResult, Object>() {
        @Override
        public Object apply(GetResult getResult) {
            if (getResult == null) {
                return null;
            }
            return reference.valueType().value(XContentMapValues.extractValue(
                    reference.info().ident().columnIdent().fqn(), getResult.sourceAsMap()));
        }
    };
}
 
Example #16
Source File: TransportShardUpsertAction.java    From Elasticsearch with Apache License 2.0 5 votes vote down vote up
@Nullable
@Override
public Object referenceValue(Reference reference) {
    if (updatedColumnValues == null) {
        return super.referenceValue(reference);
    }

    Object value = updatedColumnValues.get(reference.ident().columnIdent().fqn());
    if (value == null && !reference.ident().isColumn()) {
        value = XContentMapValues.extractValue(reference.ident().columnIdent().fqn(), updatedColumnValues);
    }
    return reference.valueType().value(value);
}
 
Example #17
Source File: TypeParsers.java    From crate with Apache License 2.0
public static void parseNorms(FieldMapper.Builder builder, String fieldName, Object propNode) {
    builder.omitNorms(XContentMapValues.nodeBooleanValue(propNode, fieldName + ".norms") == false);
}
 
Example #18
Source File: RiverConfig.java    From elasticsearch-river-kafka with Apache License 2.0
public RiverConfig(RiverName riverName, RiverSettings riverSettings) {

    // Extract kafka related configuration
    if (riverSettings.settings().containsKey("kafka")) {
        Map<String, Object> kafkaSettings = (Map<String, Object>) riverSettings.settings().get("kafka");

        topic = (String) kafkaSettings.get(TOPIC);
        zookeeperConnect = XContentMapValues.nodeStringValue(kafkaSettings.get(ZOOKEEPER_CONNECT), "localhost");
        zookeeperConnectionTimeout = XContentMapValues.nodeIntegerValue(kafkaSettings.get(ZOOKEEPER_CONNECTION_TIMEOUT), 10000);
        messageType = MessageType.fromValue(XContentMapValues.nodeStringValue(kafkaSettings.get(MESSAGE_TYPE),
                MessageType.JSON.toValue()));
    } else {
        zookeeperConnect = "localhost";
        zookeeperConnectionTimeout = 10000;
        topic = "elasticsearch-river-kafka";
        messageType = MessageType.JSON;
    }

    // Extract ElasticSearch related configuration
    if (riverSettings.settings().containsKey("index")) {
        Map<String, Object> indexSettings = (Map<String, Object>) riverSettings.settings().get("index");
        indexName = XContentMapValues.nodeStringValue(indexSettings.get(INDEX_NAME), riverName.name());
        typeName = XContentMapValues.nodeStringValue(indexSettings.get(MAPPING_TYPE), "status");
        bulkSize = XContentMapValues.nodeIntegerValue(indexSettings.get(BULK_SIZE), 100);
        concurrentRequests = XContentMapValues.nodeIntegerValue(indexSettings.get(CONCURRENT_REQUESTS), 1);
        actionType = ActionType.fromValue(XContentMapValues.nodeStringValue(indexSettings.get(ACTION_TYPE),
                ActionType.INDEX.toValue()));
        flushInterval = TimeValue.parseTimeValue(XContentMapValues.nodeStringValue(indexSettings.get(FLUSH_INTERVAL), "12h"), FLUSH_12H);
    } else {
        indexName = riverName.name();
        typeName = "status";
        bulkSize = 100;
        concurrentRequests = 1;
        actionType = ActionType.INDEX;
        flushInterval = FLUSH_12H;
    }

    // Extract StatsD related configuration
    if (riverSettings.settings().containsKey("statsd")) {
        Map<String, Object> statsdSettings = (Map<String, Object>) riverSettings.settings().get("statsd");
        statsdHost = XContentMapValues.nodeStringValue(statsdSettings.get(STATSD_HOST), "localhost");
        statsdPrefix = XContentMapValues.nodeStringValue(statsdSettings.get(STATSD_PREFIX), "kafka_river");
        statsdPort = XContentMapValues.nodeIntegerValue(statsdSettings.get(STATSD_PORT), 8125);
        statsdIntervalInSeconds = XContentMapValues.nodeIntegerValue(statsdSettings.get(STATSD_INTERVAL_IN_SECONDS), 10);
    }
}
 
Example #19
Source File: TextFieldMapper.java    From crate with Apache License 2.0
@Override
public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
    TextFieldMapper.Builder builder = new TextFieldMapper.Builder(fieldName);
    builder.fieldType().setIndexAnalyzer(parserContext.getIndexAnalyzers().getDefaultIndexAnalyzer());
    builder.fieldType().setSearchAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchAnalyzer());
    builder.fieldType().setSearchQuoteAnalyzer(parserContext.getIndexAnalyzers().getDefaultSearchQuoteAnalyzer());
    parseTextField(builder, fieldName, node, parserContext);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        String propName = entry.getKey();
        Object propNode = entry.getValue();
        if (propName.equals("position_increment_gap")) {
            int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
            builder.positionIncrementGap(newPositionIncrementGap);
            iterator.remove();
        } else if (propName.equals("fielddata")) {
            builder.fielddata(XContentMapValues.nodeBooleanValue(propNode, "fielddata"));
            iterator.remove();
        } else if (propName.equals("eager_global_ordinals")) {
            builder.eagerGlobalOrdinals(XContentMapValues.nodeBooleanValue(propNode, "eager_global_ordinals"));
            iterator.remove();
        } else if (propName.equals("fielddata_frequency_filter")) {
            Map<?,?> frequencyFilter = (Map<?, ?>) propNode;
            double minFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("min"), 0);
            double maxFrequency = XContentMapValues.nodeDoubleValue(frequencyFilter.remove("max"), Integer.MAX_VALUE);
            int minSegmentSize = XContentMapValues.nodeIntegerValue(frequencyFilter.remove("min_segment_size"), 0);
            builder.fielddataFrequencyFilter(minFrequency, maxFrequency, minSegmentSize);
            DocumentMapperParser.checkNoRemainingFields(propName, frequencyFilter, parserContext.indexVersionCreated());
            iterator.remove();
        } else if (propName.equals("index_prefixes")) {
            Map<?, ?> indexPrefix = (Map<?, ?>) propNode;
            int minChars = XContentMapValues.nodeIntegerValue(indexPrefix.remove("min_chars"),
                Defaults.INDEX_PREFIX_MIN_CHARS);
            int maxChars = XContentMapValues.nodeIntegerValue(indexPrefix.remove("max_chars"),
                Defaults.INDEX_PREFIX_MAX_CHARS);
            builder.indexPrefixes(minChars, maxChars);
            DocumentMapperParser.checkNoRemainingFields(propName, indexPrefix, parserContext.indexVersionCreated());
            iterator.remove();
        } else if (propName.equals("index_phrases")) {
            builder.indexPhrases(XContentMapValues.nodeBooleanValue(propNode, "index_phrases"));
            iterator.remove();
        }
    }
    return builder;
}
 
Example #20
Source File: S3River.java    From es-amazon-s3-river with Apache License 2.0
@Inject
@SuppressWarnings({ "unchecked" })
protected S3River(RiverName riverName, RiverSettings settings, Client client, ThreadPool threadPool) throws Exception{
   super(riverName, settings);
   this.client = client;
   this.threadPool = threadPool;
   this.riverStatus = RiverStatus.UNKNOWN;
   
   // Deal with connector settings.
   if (settings.settings().containsKey("amazon-s3")){
      Map<String, Object> feed = (Map<String, Object>)settings.settings().get("amazon-s3");
      
      // Retrieve feed settings.
      String feedname = XContentMapValues.nodeStringValue(feed.get("name"), null);
      String bucket = XContentMapValues.nodeStringValue(feed.get("bucket"), null);
      String pathPrefix = XContentMapValues.nodeStringValue(feed.get("pathPrefix"), null);
      String downloadHost = XContentMapValues.nodeStringValue(feed.get("download_host"), null);
      int updateRate = XContentMapValues.nodeIntegerValue(feed.get("update_rate"), 15 * 60 * 1000);
      boolean jsonSupport = XContentMapValues.nodeBooleanValue(feed.get("json_support"), false);
      double indexedCharsRatio  = XContentMapValues.nodeDoubleValue(feed.get("indexed_chars_ratio"), 0.0);
      
      String[] includes = S3RiverUtil.buildArrayFromSettings(settings.settings(), "amazon-s3.includes");
      String[] excludes = S3RiverUtil.buildArrayFromSettings(settings.settings(), "amazon-s3.excludes");
      
      // Retrieve connection settings.
      String accessKey = XContentMapValues.nodeStringValue(feed.get("accessKey"), null);
      String secretKey = XContentMapValues.nodeStringValue(feed.get("secretKey"), null);
      boolean useIAMRoleForEC2 = XContentMapValues.nodeBooleanValue(feed.get("use_EC2_IAM"), false);
      
      feedDefinition = new S3RiverFeedDefinition(feedname, bucket, pathPrefix, downloadHost,
            updateRate, Arrays.asList(includes), Arrays.asList(excludes), accessKey, secretKey, useIAMRoleForEC2,
            jsonSupport, indexedCharsRatio);
   } else {
      logger.error("You didn't define the amazon-s3 settings. Exiting... See https://github.com/lbroudoux/es-amazon-s3-river");
      indexName = null;
      typeName = null;
      bulkSize = 100;
      feedDefinition = null;
      s3 = null;
      return;
   }
   
   // Deal with index settings if provided.
   if (settings.settings().containsKey("index")) {
      Map<String, Object> indexSettings = (Map<String, Object>)settings.settings().get("index");
      
      indexName = XContentMapValues.nodeStringValue(indexSettings.get("index"), riverName.name());
      typeName = XContentMapValues.nodeStringValue(indexSettings.get("type"), S3RiverUtil.INDEX_TYPE_DOC);
      bulkSize = XContentMapValues.nodeIntegerValue(indexSettings.get("bulk_size"), 100);
   } else {
      indexName = riverName.name();
      typeName = S3RiverUtil.INDEX_TYPE_DOC;
      bulkSize = 100;
   }
   
   // We need to connect to Amazon S3 after ensure mandatory settings are here.
   if (feedDefinition.getBucket() == null){
      logger.error("Amazon S3 bucket should not be null. Please fix this.");
      throw new IllegalArgumentException("Amazon S3 bucket should not be null.");
   }
   // Connect using the appropriate authentication process.
   if (feedDefinition.getAccessKey() == null && feedDefinition.getSecretKey() == null) {
      s3 = new S3Connector(feedDefinition.isUseIAMRoleForEC2());
   } else {
      s3 = new S3Connector(feedDefinition.getAccessKey(), feedDefinition.getSecretKey());
   }
   try {
      s3.connectUserBucket(feedDefinition.getBucket(), feedDefinition.getPathPrefix());
   } catch (AmazonS3Exception ase){
      logger.error("Exception while connecting Amazon S3 user bucket. "
            + "Either access key, secret key, IAM Role or bucket name are incorrect");
      throw ase;
   }

   this.riverStatus = RiverStatus.INITIALIZED;
}
 
Example #21
Source File: KafkaRiverConfig.java    From elasticsearch-river-kafka with Apache License 2.0
public KafkaRiverConfig(RiverSettings settings)
{
    if (settings.settings().containsKey("kafka")) {
        Map<String, Object> kafkaSettings = (Map<String, Object>) settings.settings().get("kafka");

        topic = (String)kafkaSettings.get("topic");
        zookeeper = XContentMapValues.nodeStringValue(kafkaSettings.get("zookeeper"), "localhost");
        factoryClass = XContentMapValues.nodeStringValue(kafkaSettings.get("message_handler_factory_class"), "org.elasticsearch.river.kafka.JsonMessageHandlerFactory");
        brokerHost = XContentMapValues.nodeStringValue(kafkaSettings.get("broker_host"), "localhost");
        brokerPort = XContentMapValues.nodeIntegerValue(kafkaSettings.get("broker_port"), 9092);
        partition = XContentMapValues.nodeIntegerValue(kafkaSettings.get("partition"), 0);
    } else {
        zookeeper = "localhost";
        brokerHost = "localhost";
        brokerPort = 9092;
        topic = "default_topic";
        partition = 0;
        factoryClass = "org.elasticsearch.river.kafka.JsonMessageHandlerFactory";
    }

    if (settings.settings().containsKey("index")) {
        Map<String, Object> indexSettings = (Map<String, Object>) settings.settings().get("index");
        bulkSize = XContentMapValues.nodeIntegerValue(indexSettings.get("bulk_size_bytes"), 10*1024*1024);
        if (indexSettings.containsKey("bulk_timeout")) {
            bulkTimeout = TimeValue.parseTimeValue(XContentMapValues.nodeStringValue(indexSettings.get("bulk_timeout"), "10ms"), TimeValue.timeValueMillis(10000));
        } else {
            bulkTimeout = TimeValue.timeValueMillis(10);
        }
    } else {
        bulkSize = 10*1024*1024;
        bulkTimeout = TimeValue.timeValueMillis(10000);
    }

    if (settings.settings().containsKey("statsd")) {
        Map<String, Object> statsdSettings = (Map<String, Object>) settings.settings().get("statsd");
        statsdHost = (String)statsdSettings.get("host");
        statsdPort = XContentMapValues.nodeIntegerValue(statsdSettings.get("port"), 8125);
        statsdPrefix = XContentMapValues.nodeStringValue(statsdSettings.get("prefix"), "es-kafka-river");
    } else {
        statsdHost = null;
        statsdPort = -1;
        statsdPrefix = null;
    }
}
 
Example #22
Source File: AbstractIMAPRiverScenario.java    From elasticsearch-imap with Apache License 2.0
protected void twoRiversScenario(final String resource1, final String resource2) throws Exception {

    Map<String, Object> settings1 = settings("/" + resource1);
    Map<String, Object> settings2 = settings("/" + resource2);

    putMailInMailbox(5);

    registerRiver("firstriver", resource1);
    Thread.sleep(500);
    registerRiver("secondriver", resource2);
    Thread.sleep(6000);

    Assert.assertNotNull(statusRiver(XContentMapValues.nodeStringValue(settings1.get("mail_index_name"), null)));
    Assert.assertNotNull(statusRiver(XContentMapValues.nodeStringValue(settings2.get("mail_index_name"), null)));
}
 
Example #23
Source File: IcuCollationKeyFieldMapper.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext)
    throws MapperParsingException {
    Builder builder = new Builder(name);
    TypeParsers.parseField(builder, name, node, parserContext);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext(); ) {
        Map.Entry<String, Object> entry = iterator.next();
        String fieldName = entry.getKey();
        Object fieldNode = entry.getValue();
        switch (fieldName) {
            case "null_value":
                if (fieldNode == null) {
                    throw new MapperParsingException("property [null_value] cannot be null");
                }
                builder.nullValue(fieldNode.toString());
                iterator.remove();
                break;
            case "norms":
                builder.omitNorms(!XContentMapValues.nodeBooleanValue(fieldNode));
                iterator.remove();
                break;
            case "rules":
                builder.rules(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "language":
                builder.language(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "country":
                builder.country(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "variant":
                builder.variant(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "strength":
                builder.strength(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "decomposition":
                builder.decomposition(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "alternate":
                builder.alternate(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "case_level":
                builder.caseLevel(XContentMapValues.nodeBooleanValue(fieldNode));
                iterator.remove();
                break;
            case "case_first":
                builder.caseFirst(XContentMapValues.nodeStringValue(fieldNode, null));
                iterator.remove();
                break;
            case "numeric":
                builder.numeric(XContentMapValues.nodeBooleanValue(fieldNode));
                iterator.remove();
                break;
            default:
                break;
        }
    }
    return builder;
}
 
Example #24
Source File: StringFieldMapper.java    From Elasticsearch with Apache License 2.0
@Override
public Mapper.Builder parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
    StringFieldMapper.Builder builder = stringField(name);
    parseTextField(builder, name, node, parserContext);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        String propName = Strings.toUnderscoreCase(entry.getKey());
        Object propNode = entry.getValue();
        if (propName.equals("null_value")) {
            if (propNode == null) {
                throw new MapperParsingException("Property [null_value] cannot be null.");
            }
            builder.nullValue(propNode.toString());
            iterator.remove();
        } else if (propName.equals("search_quote_analyzer")) {
            NamedAnalyzer analyzer = parserContext.analysisService().analyzer(propNode.toString());
            if (analyzer == null) {
                throw new MapperParsingException("Analyzer [" + propNode.toString() + "] not found for field [" + name + "]");
            }
            builder.searchQuotedAnalyzer(analyzer);
            iterator.remove();
        } else if (propName.equals("position_increment_gap") ||
                parserContext.indexVersionCreated().before(Version.V_2_0_0) && propName.equals("position_offset_gap")) {
            int newPositionIncrementGap = XContentMapValues.nodeIntegerValue(propNode, -1);
            if (newPositionIncrementGap < 0) {
                throw new MapperParsingException("positions_increment_gap less than 0 aren't allowed.");
            }
            builder.positionIncrementGap(newPositionIncrementGap);
            // we need to update to actual analyzers if they are not set in this case...
            // so we can inject the position increment gap...
            if (builder.fieldType().indexAnalyzer() == null) {
                builder.fieldType().setIndexAnalyzer(parserContext.analysisService().defaultIndexAnalyzer());
            }
            if (builder.fieldType().searchAnalyzer() == null) {
                builder.fieldType().setSearchAnalyzer(parserContext.analysisService().defaultSearchAnalyzer());
            }
            if (builder.fieldType().searchQuoteAnalyzer() == null) {
                builder.fieldType().setSearchQuoteAnalyzer(parserContext.analysisService().defaultSearchQuoteAnalyzer());
            }
            iterator.remove();
        } else if (propName.equals("ignore_above")) {
            builder.ignoreAbove(XContentMapValues.nodeIntegerValue(propNode, -1));
            iterator.remove();
        } else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
            iterator.remove();
        }
    }
    return builder;
}
 
Example #25
Source File: GeoPointFieldMapperLegacy.java    From Elasticsearch with Apache License 2.0
public static Builder parse(Builder builder, Map<String, Object> node, Mapper.TypeParser.ParserContext parserContext) throws MapperParsingException {
    final boolean indexCreatedBeforeV2_0 = parserContext.indexVersionCreated().before(Version.V_2_0_0);
    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        String propName = Strings.toUnderscoreCase(entry.getKey());
        Object propNode = entry.getValue();
        if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " validate parameter is deprecated and will be removed "
            + "in the next major release");
            builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " validate_lon parameter is deprecated and will be removed "
                + "in the next major release");
            builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " validate_lat parameter is deprecated and will be removed "
                + "in the next major release");
            builder.ignoreMalformed = !XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        } else if (propName.equals(Names.COERCE)) {
            builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        } else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " normalize parameter is deprecated and will be removed "
                + "in the next major release");
            builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " normalize_lat parameter is deprecated and will be removed "
                + "in the next major release");
            builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " normalize_lon parameter is deprecated and will be removed "
                + "in the next major release");
            builder.coerce = XContentMapValues.nodeBooleanValue(propNode);
            iterator.remove();
        }
    }
    return builder;
}
 
Example #26
Source File: BaseGeoPointFieldMapper.java    From Elasticsearch with Apache License 2.0
@Override
public Mapper.Builder<?, ?> parse(String name, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
    Builder builder;
    if (parserContext.indexVersionCreated().before(Version.V_2_2_0)) {
        builder = new GeoPointFieldMapperLegacy.Builder(name);
    } else {
        builder = new GeoPointFieldMapper.Builder(name);
    }
    parseField(builder, name, node, parserContext);

    for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<String, Object> entry = iterator.next();
        String propName = Strings.toUnderscoreCase(entry.getKey());
        Object propNode = entry.getValue();
        if (propName.equals("lat_lon")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " lat_lon parameter is deprecated and will be removed "
                + "in the next major release");
            builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
            iterator.remove();
        } else if (propName.equals("precision_step")) {
            deprecationLogger.deprecated(CONTENT_TYPE + " precision_step parameter is deprecated and will be removed "
                + "in the next major release");
            builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
            iterator.remove();
        } else if (propName.equals("geohash")) {
            builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
            iterator.remove();
        } else if (propName.equals("geohash_prefix")) {
            builder.geoHashPrefix(XContentMapValues.nodeBooleanValue(propNode));
            if (XContentMapValues.nodeBooleanValue(propNode)) {
                builder.enableGeoHash(true);
            }
            iterator.remove();
        } else if (propName.equals("geohash_precision")) {
            if (propNode instanceof Integer) {
                builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode));
            } else {
                builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString()));
            }
            iterator.remove();
        } else if (propName.equals(Names.IGNORE_MALFORMED)) {
            builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
            iterator.remove();
        } else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
            iterator.remove();
        }
    }

    if (builder instanceof GeoPointFieldMapperLegacy.Builder) {
        return GeoPointFieldMapperLegacy.parse((GeoPointFieldMapperLegacy.Builder) builder, node, parserContext);
    }

    return (GeoPointFieldMapper.Builder) builder;
}
 
Example #27
Source File: SourceLookup.java    From Elasticsearch with Apache License 2.0
public Object extractValue(String path) {
    return XContentMapValues.extractValue(path, loadSourceIfNeeded());
}
 
Example #28
Source File: SourceLookup.java    From Elasticsearch with Apache License 2.0
public Object filter(String[] includes, String[] excludes) {
    return XContentMapValues.filter(loadSourceIfNeeded(), includes, excludes);
}
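
Examples #27 and #28 are thin delegating wrappers. The short sketch below illustrates what the Map-based filter(map, includes, excludes) call in Example #28 does with include/exclude patterns; the field names and the Map return type are assumptions for illustration, and newer Elasticsearch versions expose filter(includes, excludes) returning a reusable function instead, as in Example #1.

// Hypothetical document source.
Map<String, Object> user = new HashMap<>();
user.put("name", "alice");
user.put("email", "alice@example.com");
user.put("password", "secret");

Map<String, Object> source = new HashMap<>();
source.put("user", user);
source.put("timestamp", "2015-01-01");

// Keep the user object but strip user.password; fields outside the includes are dropped.
Map<String, Object> filtered = XContentMapValues.filter(source,
        new String[]{"user"}, new String[]{"user.password"});
// filtered now holds only user.name and user.email; timestamp and user.password are gone.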
 
Example #29
Source File: AbstractIMAPRiverScenario.java    From elasticsearch-imap with Apache License 2.0
protected void renameScenarioIMAP(final String resource) throws Exception {

    Map<String, Object> settings = settings("/" + resource);

    final Properties props = new Properties();
    final String indexName = XContentMapValues.nodeStringValue(settings.get("mail_index_name"), "imapriver");
    final String typeName = XContentMapValues.nodeStringValue(settings.get("mail_type_name"), "mail");
    final String user = XContentMapValues.nodeStringValue(settings.get("user"), null);
    final String password = XContentMapValues.nodeStringValue(settings.get("password"), null);

    for (final Map.Entry<String, Object> entry : settings.entrySet()) {
        if (entry != null && entry.getKey().startsWith("mail.")) {
            props.setProperty(entry.getKey(), String.valueOf(entry.getValue()));
        }
    }

    createInitialIMAPTestdata(props, user, password, 10, true); // 3*10

    registerRiver("myrivunit", resource);

    Thread.sleep(1000);

    Thread.sleep(25 * 1000);

    Assert.assertEquals(30, getCount(indexName, typeName));

    renameMailbox(props, "ES-IMAP-RIVER-TESTS", user, password);

    Thread.sleep(25 * 1000);

    deleteMailsFromUserMailbox(props, "renamed_from_ES-IMAP-RIVER-TESTS", 4, 2, user, password); // delete 2

    Thread.sleep(25 * 1000);

    Assert.assertEquals(28, getCount(indexName, typeName));
}
 
Example #30
Source File: AbstractIMAPRiverScenario.java    From elasticsearch-imap with Apache License 2.0
protected void deleteScenarioPOP(final String resource) throws Exception {

    Map<String, Object> settings = settings("/" + resource);

    final Properties props = new Properties();
    final String indexName = XContentMapValues.nodeStringValue(settings.get("mail_index_name"), "imapriver");
    final String typeName = XContentMapValues.nodeStringValue(settings.get("mail_type_name"), "mail");
    final String user = XContentMapValues.nodeStringValue(settings.get("user"), null);
    final String password = XContentMapValues.nodeStringValue(settings.get("password"), null);

    for (final Map.Entry<String, Object> entry : settings.entrySet()) {
        if (entry != null && entry.getKey().startsWith("mail.")) {
            props.setProperty(entry.getKey(), String.valueOf(entry.getValue()));
        }
    }

    putMailInMailbox(10);

    registerRiver("myrivunit", resource);

    Thread.sleep(1000);

    Thread.sleep(5 * 1000);

    deleteMailsFromUserMailbox(props, "INBOX", 1, 3, user, password); // delete 3

    Thread.sleep(5 * 1000);

    putMailInMailbox(5);

    Thread.sleep(5 * 1000);

    deleteMailsFromUserMailbox(props, "INBOX", 4, 2, user, password); // delete 2

    Thread.sleep(5 * 1000);

    Assert.assertEquals(10, getCount(indexName, typeName));
}