org.elasticsearch.index.mapper.TextFieldMapper Java Examples

The following examples show how to use org.elasticsearch.index.mapper.TextFieldMapper. Each example is taken from an open source project; the source file and license are noted in its heading.
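The examples below use TextFieldMapper in two ways: building text fields programmatically through TextFieldMapper.Builder (Examples #1 and #4) and registering TextFieldMapper.TypeParser so that "text" mappings can be parsed (Examples #2 and #3). As a minimal sketch of the first pattern (field and type names here are illustrative, and the single-argument Builder constructor matches the Elasticsearch versions these projects build against):

// Minimal sketch (illustrative names): add a text field to a root object mapping.
RootObjectMapper.Builder root = new RootObjectMapper.Builder("my_type");
root.add(new TextFieldMapper.Builder("description"));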
Example #1
Source File: LocalWeatherDataMapper.java    From FlinkExperiments with MIT License
@Override
protected void configureRootObjectBuilder(RootObjectMapper.Builder builder) {
    // Map the weather measurements as date/number/text fields and add a nested
    // "station" object with text and geo-point sub-fields.
    builder
            .add(new DateFieldMapper.Builder("dateTime"))
            .add(new NumberFieldMapper.Builder("temperature", NumberType.FLOAT))
            .add(new NumberFieldMapper.Builder("windSpeed", NumberType.FLOAT))
            .add(new NumberFieldMapper.Builder("stationPressure", NumberType.FLOAT))
            .add(new TextFieldMapper.Builder("skyCondition"))
            .add(new ObjectMapper.Builder("station")
                    .add(new TextFieldMapper.Builder("wban"))
                    .add(new TextFieldMapper.Builder("name"))
                    .add(new TextFieldMapper.Builder("state"))
                    .add(new TextFieldMapper.Builder("location"))
                    .add(new GeoPointFieldMapper.Builder("coordinates"))
                    .nested(ObjectMapper.Nested.newNested(true, false)));
}
 
Example #2
Source File: IndicesModule.java    From crate with Apache License 2.0
private Map<String, Mapper.TypeParser> getMappers(List<MapperPlugin> mapperPlugins) {
    Map<String, Mapper.TypeParser> mappers = new LinkedHashMap<>();

    // builtin mappers
    for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) {
        mappers.put(type.typeName(), new NumberFieldMapper.TypeParser(type));
    }
    mappers.put(BooleanFieldMapper.CONTENT_TYPE, new BooleanFieldMapper.TypeParser());
    mappers.put(DateFieldMapper.CONTENT_TYPE, new DateFieldMapper.TypeParser());
    mappers.put(IpFieldMapper.CONTENT_TYPE, new IpFieldMapper.TypeParser());
    mappers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser());
    mappers.put(KeywordFieldMapper.CONTENT_TYPE, new KeywordFieldMapper.TypeParser());
    mappers.put(ObjectMapper.CONTENT_TYPE, new ObjectMapper.TypeParser());
    mappers.put(FieldAliasMapper.CONTENT_TYPE, new FieldAliasMapper.TypeParser());
    mappers.put(GeoPointFieldMapper.CONTENT_TYPE, new GeoPointFieldMapper.TypeParser());

    if (ShapesAvailability.JTS_AVAILABLE && ShapesAvailability.SPATIAL4J_AVAILABLE) {
        mappers.put(GeoShapeFieldMapper.CONTENT_TYPE, new GeoShapeFieldMapper.TypeParser());
    }

    for (MapperPlugin mapperPlugin : mapperPlugins) {
        for (Map.Entry<String, Mapper.TypeParser> entry : mapperPlugin.getMappers().entrySet()) {
            if (mappers.put(entry.getKey(), entry.getValue()) != null) {
                throw new IllegalArgumentException("Mapper [" + entry.getKey() + "] is already registered");
            }
        }
    }
    return Collections.unmodifiableMap(mappers);
}
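The final loop in getMappers(...) merges in parsers contributed by plugins and rejects duplicate registrations. As a rough illustration of where those entries come from (the class and type name are hypothetical), a MapperPlugin supplies them by overriding getMappers():

// Hypothetical plugin: exposes TextFieldMapper's parser under a custom type name.
// A real plugin would typically register its own Mapper.TypeParser implementation.
public class MyMapperPlugin extends Plugin implements MapperPlugin {
    @Override
    public Map<String, Mapper.TypeParser> getMappers() {
        return Collections.singletonMap("my_text", new TextFieldMapper.TypeParser());
    }
}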
 
Example #3
Source File: KeywordFieldMapperTest.java    From crate with Apache License 2.0
@Test
public void test_keywords_fields_mapping_merge_fail_on_different_length_limit() throws Exception {
    var parser = parser(new TextFieldMapper.TypeParser());

    var mapper = parser.parse(TYPE, new CompressedXContent(Strings.toString(
        XContentFactory.jsonBuilder()
            .startObject()
                .startObject(TYPE)
                    .startObject("properties")
                        .startObject("text_col")
                            .field("type", "keyword")
                            .field("length_limit", 2)
                        .endObject()
                    .endObject()
                .endObject()
            .endObject())));
    var anotherMapper = parser.parse(TYPE, new CompressedXContent(Strings.toString(
        XContentFactory.jsonBuilder()
            .startObject()
                .startObject(TYPE)
                    .startObject("properties")
                        .startObject("text_col")
                            .field("type", "keyword")
                            .field("index", false)
                            .field("length_limit", 1)
                        .endObject()
                    .endObject()
                .endObject()
            .endObject())));

    expectedException.expect(IllegalArgumentException.class);
    expectedException.expectMessage("mapper [text_col] has different length_limit settings, current [2], merged [1]");
    mapper.merge(anotherMapper.mapping(), false);
}
 
Example #4
Source File: ReferenceMapper.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0
public Builder(String name, Client client) {
    super(name, FIELD_TYPE, FIELD_TYPE);
    this.client = client;
    this.refFields = new LinkedList<>();
    this.contentBuilder = new TextFieldMapper.Builder(name); // default content mapping: a plain text field
}
 
Example #5
Source File: AnalysisRegistry.java    From crate with Apache License 2.0
private void processAnalyzerFactory(IndexSettings indexSettings,
                                    String name,
                                    AnalyzerProvider<?> analyzerFactory,
                                    Map<String, NamedAnalyzer> analyzerAliases,
                                    Map<String, NamedAnalyzer> analyzers, Map<String, TokenFilterFactory> tokenFilters,
                                    Map<String, CharFilterFactory> charFilters, Map<String, TokenizerFactory> tokenizers) {
    /*
     * Lucene defaults positionIncrementGap to 0 in all analyzers, but
     * Elasticsearch defaulted it to 0 only before version 2.0 and to
     * 100 afterwards, so we override the positionIncrementGap if it
     * doesn't match here.
     */
    int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
    if (analyzerFactory instanceof CustomAnalyzerProvider) {
        ((CustomAnalyzerProvider) analyzerFactory).build(tokenizers, charFilters, tokenFilters);
        /*
         * Custom analyzers already default to the correct, version-
         * dependent positionIncrementGap, and the user is able to
         * configure the positionIncrementGap directly on the analyzer, so
         * we disable overriding the positionIncrementGap to preserve the
         * user's setting.
         */
        overridePositionIncrementGap = Integer.MIN_VALUE;
    }
    Analyzer analyzerF = analyzerFactory.get();
    if (analyzerF == null) {
        throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
    }
    NamedAnalyzer analyzer;
    if (analyzerF instanceof NamedAnalyzer) {
        // if we got a named analyzer back, use it...
        analyzer = (NamedAnalyzer) analyzerF;
        if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
            // unless the positionIncrementGap needs to be overridden
            analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
        }
    } else {
        analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
    }
    if (analyzers.containsKey(name)) {
        throw new IllegalStateException("already registered analyzer with name: " + name);
    }
    analyzers.put(name, analyzer);
    // TODO: remove alias support completely when we no longer support pre 5.0 indices
    final String analyzerAliasKey = "index.analysis.analyzer." + analyzerFactory.name() + ".alias";
    if (indexSettings.getSettings().get(analyzerAliasKey) != null) {
        throw new IllegalArgumentException("setting [" + analyzerAliasKey + "] is not supported");
    }
}
 
Example #6
Source File: CustomAnalyzerProvider.java    From crate with Apache License 2.0
public void build(final Map<String, TokenizerFactory> tokenizers, final Map<String, CharFilterFactory> charFilters,
                  final Map<String, TokenFilterFactory> tokenFilters) {
    String tokenizerName = analyzerSettings.get("tokenizer");
    if (tokenizerName == null) {
        throw new IllegalArgumentException("Custom Analyzer [" + name() + "] must be configured with a tokenizer");
    }

    TokenizerFactory tokenizer = tokenizers.get(tokenizerName);
    if (tokenizer == null) {
        throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find tokenizer under name [" + tokenizerName + "]");
    }

    List<String> charFilterNames = analyzerSettings.getAsList("char_filter");
    List<CharFilterFactory> charFiltersList = new ArrayList<>(charFilterNames.size());
    for (String charFilterName : charFilterNames) {
        CharFilterFactory charFilter = charFilters.get(charFilterName);
        if (charFilter == null) {
            throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find char_filter under name [" + charFilterName + "]");
        }
        charFiltersList.add(charFilter);
    }

    int positionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;

    positionIncrementGap = analyzerSettings.getAsInt("position_increment_gap", positionIncrementGap);

    int offsetGap = analyzerSettings.getAsInt("offset_gap", -1);

    List<String> tokenFilterNames = analyzerSettings.getAsList("filter");
    List<TokenFilterFactory> tokenFilterList = new ArrayList<>(tokenFilterNames.size());
    for (String tokenFilterName : tokenFilterNames) {
        TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
        if (tokenFilter == null) {
            throw new IllegalArgumentException("Custom Analyzer [" + name() + "] failed to find filter under name [" + tokenFilterName + "]");
        }
        tokenFilter = tokenFilter.getChainAwareTokenFilterFactory(tokenizer, charFiltersList, tokenFilterList, tokenFilters::get);
        tokenFilterList.add(tokenFilter);
    }

    this.customAnalyzer = new CustomAnalyzer(tokenizerName, tokenizer,
            charFiltersList.toArray(new CharFilterFactory[charFiltersList.size()]),
            tokenFilterList.toArray(new TokenFilterFactory[tokenFilterList.size()]),
            positionIncrementGap,
            offsetGap
    );
}
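For context, build(...) reads its configuration from the analyzer's settings group: "tokenizer" (required), "char_filter", "filter", "position_increment_gap", and "offset_gap". A hedged sketch of index settings defining such an analyzer (the analyzer and component names are illustrative, and this assumes the 6.x+ Settings builder API):

// Illustrative custom analyzer definition; the CustomAnalyzerProvider receives the
// settings scoped to "my_analyzer", so build(...) looks the keys up relative to it.
Settings settings = Settings.builder()
        .put("index.analysis.analyzer.my_analyzer.type", "custom")
        .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
        .putList("index.analysis.analyzer.my_analyzer.char_filter", "html_strip")
        .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase")
        .put("index.analysis.analyzer.my_analyzer.position_increment_gap", 100)
        .build();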