org.elasticsearch.hadoop.cfg.ConfigurationOptions Java Examples

The following examples show how to use org.elasticsearch.hadoop.cfg.ConfigurationOptions. You can vote up the examples you like or vote down the ones you don't, and go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example #1
Source File: HttpRetryHandler.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the retry handler from the batch-write retry settings.
 * Symbolic policy names ("simple"/"none") are resolved to their implementing
 * classes; anything else is treated as a fully-qualified class name.
 */
public HttpRetryHandler(Settings settings) {
    String policyName = settings.getBatchWriteRetryPolicy();

    // Translate the two built-in shorthand names into concrete classes.
    if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_SIMPLE.equals(policyName)) {
        policyName = SimpleHttpRetryPolicy.class.getName();
    } else if (ConfigurationOptions.ES_BATCH_WRITE_RETRY_POLICY_NONE.equals(policyName)) {
        policyName = NoHttpRetryPolicy.class.getName();
    }

    HttpRetryPolicy policy = ObjectUtils.instantiate(policyName, settings);
    this.retry = policy.init();
    this.retryLimit = settings.getBatchWriteRetryCount();
    this.retryTime = settings.getBatchWriteRetryWait();
}
 
Example #2
Source File: CommandTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpdateOnlyInlineScript1X() throws Exception {
    // Only meaningful for the update operation against 1.x clusters,
    // where scripts are serialized in the legacy flat ("lang"/"script") form.
    assumeTrue(ConfigurationOptions.ES_OPERATION_UPDATE.equals(operation));
    assumeTrue(version.onOrBefore(EsMajorVersion.V_1_X));

    Settings scriptSettings = settings();
    scriptSettings.setProperty(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "yes");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_RETRY_ON_CONFLICT, "3");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_INLINE, "counter = 3");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");

    create(scriptSettings).write(data).copyTo(ba);

    String expected = "{\"" + operation + "\":{\"_id\":2,\"_retry_on_conflict\":3}}\n"
            + "{\"lang\":\"groovy\",\"script\":\"counter = 3\"}\n";
    assertEquals(expected, ba.toString());
}
 
Example #3
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testBasicMultiSave() throws Exception {
    // Writes the same job output through several output formats at once:
    // one copy goes to Elasticsearch, another is printed to a stream.
    JobConf conf = createJobConf();
    conf.set(ConfigurationOptions.ES_RESOURCE, resource("oldapi-multi-save", "data", clusterInfo.getMajorVersion()));

    MultiOutputFormat.addOutputFormat(conf, EsOutputFormat.class);
    MultiOutputFormat.addOutputFormat(conf, PrintStreamOutputFormat.class);
    //MultiOutputFormat.addOutputFormat(conf, TextOutputFormat.class);

    PrintStreamOutputFormat.stream(conf, Stream.OUT);
    //conf.set("mapred.output.dir", "foo/bar");
    //FileOutputFormat.setOutputPath(conf, new Path("foo/bar"));

    // MultiOutputFormat fans records out to every format registered above.
    conf.setClass("mapred.output.format.class", MultiOutputFormat.class, OutputFormat.class);
    runJob(conf);
}
 
Example #4
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testBasicIndexWithConstantRouting() throws Exception {
    // Every document is routed with the same constant value ("<foobar/>").
    String index = indexPrefix + "mroldapi-savewithconstantrouting";
    String type = "data";

    JobConf jobConf = createJobConf();
    jobConf.set(ConfigurationOptions.ES_MAPPING_ROUTING, "<foobar/>");

    RestUtils.touch(index);
    boolean typeless = clusterInfo.getMajorVersion().onOrAfter(EsMajorVersion.V_7_X);
    if (typeless) {
        // 7.x+ has no mapping types: the resource is the bare index name.
        jobConf.set(ConfigurationOptions.ES_RESOURCE, index);
        RestUtils.putMapping(index, type, StringUtils.toUTF("{\"_routing\": {\"required\":true}}"));
    } else {
        jobConf.set(ConfigurationOptions.ES_RESOURCE, resource(index, type, clusterInfo.getMajorVersion()));
        RestUtils.putMapping(index, type, StringUtils.toUTF("{\""+ type + "\":{\"_routing\": {\"required\":true}}}"));
    }

    runJob(jobConf);
}
 
Example #5
Source File: CommandTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpdateOnlyParamFileScript1X() throws Exception {
    // Update-only, 1.x: file-based script with both constant (<1>) and
    // field-extracted (n) parameters.
    assumeTrue(ConfigurationOptions.ES_OPERATION_UPDATE.equals(operation));
    assumeTrue(version.onOrBefore(EsMajorVersion.V_1_X));

    Settings scriptSettings = settings();
    scriptSettings.setProperty(ConfigurationOptions.ES_MAPPING_ID, "n");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_FILE, "set_counter");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_PARAMS, " param1:<1>,   param2:n ");

    create(scriptSettings).write(data).copyTo(ba);

    String expected = "{\"" + operation + "\":{\"_id\":1}}\n"
            + "{\"params\":{\"param1\":1,\"param2\":1},\"lang\":\"groovy\",\"script_file\":\"set_counter\"}\n";
    assertEquals(expected, ba.toString());
}
 
Example #6
Source File: AbstractRestSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testCreatePartitionWriterWithAliasUsingSingleIndex() throws Exception {
    // Register an alias pointing at exactly one index; a partition writer
    // created against the alias should initialize and close cleanly.
    String aliasAction =
            "{\"actions\": [{\"add\": {\"index\": \"alias_index1\",\"alias\": \"single_alias\"}}]}";
    RestUtils.postData("_aliases", aliasAction.getBytes());

    Settings settings = new TestSettings();
    settings.setProperty(ConfigurationOptions.ES_RESOURCE, "single_alias/doc");
    InitializationUtils.setValueWriterIfNotSet(settings, WritableValueWriter.class, LOG);
    InitializationUtils.setBytesConverterIfNeeded(settings, WritableBytesConverter.class, LOG);
    InitializationUtils.setFieldExtractorIfNotSet(settings, MapWritableFieldExtractor.class, LOG);

    RestService.PartitionWriter writer = RestService.createWriter(settings, 1, 3, LOG);
    writer.close();
}
 
Example #7
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpdateOnlyParamJsonScript() throws Exception {
    // Update by id with script parameters supplied as a raw JSON object.
    JobConf jobConf = createJobConf();
    jobConf.set(ConfigurationOptions.ES_RESOURCE, resource("mroldapi-createwithid", "data", clusterInfo.getMajorVersion()));
    jobConf.set(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "yes");
    jobConf.set(ConfigurationOptions.ES_WRITE_OPERATION, "update");
    jobConf.set(ConfigurationOptions.ES_MAPPING_ID, "number");
    jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_PARAMS_JSON, "{ \"param1\":1, \"param2\":2}");

    // 5.x+ clusters take a Painless script; older ones take Groovy.
    if (clusterInfo.getMajorVersion().onOrAfter(EsMajorVersion.V_5_X)) {
        jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_INLINE, "int counter = params.param1; int anothercounter = params.param2");
        jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "painless");
    } else {
        jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_INLINE, "counter = param1; anothercounter = param2");
        jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");
    }

    runJob(jobConf);
}
 
Example #8
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testBasicIndexWithExtractedRouting() throws Exception {
    // Routing value is extracted per-document from the "number" field.
    String index = indexPrefix + "mroldapi-savewithdynamicrouting";
    String type = "data";

    JobConf jobConf = createJobConf();
    jobConf.set(ConfigurationOptions.ES_MAPPING_ROUTING, "number");

    RestUtils.touch(index);
    boolean typeless = clusterInfo.getMajorVersion().onOrAfter(EsMajorVersion.V_7_X);
    if (typeless) {
        // 7.x+ has no mapping types: the resource is the bare index name.
        jobConf.set(ConfigurationOptions.ES_RESOURCE, index);
        RestUtils.putMapping(index, type, StringUtils.toUTF("{\"_routing\": {\"required\":true}}"));
    } else {
        jobConf.set(ConfigurationOptions.ES_RESOURCE, resource(index, type, clusterInfo.getMajorVersion()));
        RestUtils.putMapping(index, type, StringUtils.toUTF("{\""+ type + "\":{\"_routing\": {\"required\":true}}}"));
    }

    runJob(jobConf);
}
 
Example #9
Source File: ScrollReaderTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test(expected = EsHadoopException.class)
public void testScrollWithThrowingErrorHandler() throws IOException {
    // A deserialization error handler configured to "throw" should surface the
    // parsing failure as an EsHadoopException (declared expected above).
    MappingSet mappings = getMappingSet("numbers-as-strings");

    Settings testSettings = new TestSettings();
    testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
    testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
    testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);
    testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLERS , "throw");
    testSettings.setProperty(DeserializationHandlerLoader.ES_READ_DATA_ERROR_HANDLER + ".throw" , ExceptionThrowingHandler.class.getName());

    JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
    ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));

    // Fix: close the scroll fixture stream; the original leaked it on both the
    // expected (throwing) and unexpected (non-throwing) paths.
    try (InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"))) {
        reader.read(stream);
    }
    fail("Should not be able to parse string as long");
}
 
Example #10
Source File: StreamFromEs.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Builds and submits a read topology (ES spout -> capturing bolt) configured
 * for Kerberos authentication against the given Elasticsearch nodes.
 */
public static void submitJob(String principal, String keytab, String esNodes) throws Exception {
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("ES", new EsSpout("storm-test"));
    builder.setBolt("Output", new CapturingBolt()).shuffleGrouping("ES");

    // Nimbus needs to be started with the cred renewer and credentials plugins set in its config file
    Config topoConf = new Config();
    List<Object> credentialPlugins = new ArrayList<Object>();
    credentialPlugins.add(AutoElasticsearch.class.getName());
    topoConf.put(Config.TOPOLOGY_AUTO_CREDENTIALS, credentialPlugins);
    topoConf.put(ConfigurationOptions.ES_NODES, esNodes);
    topoConf.put(ConfigurationOptions.ES_SECURITY_AUTHENTICATION, "kerberos");
    topoConf.put(ConfigurationOptions.ES_NET_SPNEGO_AUTH_ELASTICSEARCH_PRINCIPAL, "HTTP/[email protected]");
    topoConf.put(ConfigurationOptions.ES_INPUT_JSON, "true");

    StormSubmitter.submitTopology("test-read", topoConf, builder.createTopology());
}
 
Example #11
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testSaveWithIngest() throws Exception {
    // Writes through an ingest pipeline that stamps pipeTEST=true on each doc.
    EsAssume.versionOnOrAfter(EsMajorVersion.V_5_X, "Ingest Supported in 5.x and above only");

    JobConf conf = createJobConf();

    // Register the test pipeline up front.
    RestUtils.ExtendedRestClient client = new RestUtils.ExtendedRestClient();
    try {
        String prefix = "mroldapi";
        String pipeline = "{\"description\":\"Test Pipeline\",\"processors\":[{\"set\":{\"field\":\"pipeTEST\",\"value\":true,\"override\":true}}]}";
        client.put("/_ingest/pipeline/" + prefix + "-pipeline", StringUtils.toUTF(pipeline));
    } finally {
        // Fix: the original leaked the client when put() threw.
        client.close();
    }

    conf.set(ConfigurationOptions.ES_RESOURCE, resource("mroldapi-ingested", "data", clusterInfo.getMajorVersion()));
    conf.set(ConfigurationOptions.ES_INGEST_PIPELINE, "mroldapi-pipeline");
    conf.set(ConfigurationOptions.ES_NODES_INGEST_ONLY, "true");

    runJob(conf);
}
 
Example #12
Source File: StreamToEs.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Builds and submits a write topology (test spout -> ES bolt) configured for
 * Kerberos authentication against the given Elasticsearch nodes.
 */
public static void submitJob(String principal, String keytab, String esNodes) throws Exception {
    // Fix: parameterize the raw List types — each doc is a single JSON string.
    List<String> doc1 = Collections.singletonList("{\"reason\" : \"business\",\"airport\" : \"SFO\"}");
    List<String> doc2 = Collections.singletonList("{\"participants\" : 5,\"airport\" : \"OTP\"}");

    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("Input", new TestSpout(ImmutableList.of(doc1, doc2), new Fields("json"), true));
    builder.setBolt("ES", new EsBolt("storm-test"))
            .shuffleGrouping("Input")
            .addConfiguration(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 2);

    // Nimbus needs to be started with the cred renewer and credentials plugins set in its config file

    Config conf = new Config();
    List<Object> plugins = new ArrayList<Object>();
    plugins.add(AutoElasticsearch.class.getName());
    conf.put(Config.TOPOLOGY_AUTO_CREDENTIALS, plugins);
    conf.put(ConfigurationOptions.ES_NODES, esNodes);
    conf.put(ConfigurationOptions.ES_SECURITY_AUTHENTICATION, "kerberos");
    conf.put(ConfigurationOptions.ES_NET_SPNEGO_AUTH_ELASTICSEARCH_PRINCIPAL, "HTTP/[email protected]");
    conf.put(ConfigurationOptions.ES_INPUT_JSON, "true");
    StormSubmitter.submitTopology("test-run", conf, builder.createTopology());
}
 
Example #13
Source File: AbstractRestSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testBulkWrite() throws Exception {
    // Streams the artists fixture (tab/newline separated) into the index,
    // one document per line: number \t name \t url \t picture.
    TestSettings testSettings = new TestSettings("rest/savebulk");
    testSettings.setProperty(ConfigurationOptions.ES_SERIALIZATION_WRITER_VALUE_CLASS, JdkValueWriter.class.getName());
    RestRepository client = new RestRepository(testSettings);

    Map<String, String> line = new LinkedHashMap<String, String>();
    // Fix: the original never closed the Scanner (leaking the resource stream)
    // and never closed the repository on failure.
    try (Scanner in = new Scanner(getClass().getResourceAsStream("/artists.dat")).useDelimiter("\\n|\\t")) {
        while (in.hasNextLine()) {
            // skip the leading record number
            in.next();
            line.put("name", in.next());
            line.put("url", in.next());
            line.put("picture", in.next());
            client.writeToIndex(line);
            line.clear();
        }
    } finally {
        client.close();
    }
}
 
Example #14
Source File: AbstractStormIdMappingBoltTests.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void test2WriteWithId() throws Exception {
    // Two documents whose "number" field doubles as the ES document id below.
    List doc1 = ImmutableList.of("one", "fo1", "two", "fo2", "number", 1);
    List doc2 = ImmutableList.of("OTP", "Otopeni", "SFO", "San Fran", "number", 2);

    // Copy the shared config and point es.mapping.id at the "number" tuple field.
    Map localCfg = new LinkedHashMap(conf);
    localCfg.put(ConfigurationOptions.ES_MAPPING_ID, "number");

    String target = index + "/id-write";
    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("test-spout-2", new TestSpout(ImmutableList.of(doc2, doc1), new Fields("key1", "valo1", "key2",
            "valo2", "key3", "number")));
    builder.setBolt("es-bolt-2", new TestBolt(new EsBolt(target, localCfg))).shuffleGrouping("test-spout-2");

    MultiIndexSpoutStormSuite.run(index + "id-write", builder.createTopology(), COMPONENT_HAS_COMPLETED);

    // Block until the bolt signals completion (up to 10s), then make the
    // writes searchable before asserting.
    COMPONENT_HAS_COMPLETED.waitFor(1, TimeValue.timeValueSeconds(10));

    RestUtils.refresh(index);
    Thread.sleep(1000);
    // Documents must be addressable by the ids extracted from "number".
    assertTrue(RestUtils.exists(target + "/1"));
    assertTrue(RestUtils.exists(target + "/2"));

    String results = RestUtils.get(target + "/_search?");
    assertThat(results, containsString("two"));
}
 
Example #15
Source File: CommandTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the baseline settings shared by the parameterized command tests:
 * target version, JSON-input flag, default serialization hooks, write
 * operation and resource, plus a constant id for update operations.
 */
private Settings settings() {
    Settings baseSettings = new TestSettings();
    baseSettings.setInternalVersion(version);
    baseSettings.setProperty(ConfigurationOptions.ES_INPUT_JSON, Boolean.toString(jsonInput));

    InitializationUtils.setValueWriterIfNotSet(baseSettings, JdkValueWriter.class, null);
    InitializationUtils.setFieldExtractorIfNotSet(baseSettings, MapFieldExtractor.class, null);
    InitializationUtils.setBytesConverterIfNeeded(baseSettings, JdkBytesConverter.class, null);
    InitializationUtils.setUserProviderIfNotSet(baseSettings, HadoopUserProvider.class, null);

    baseSettings.setProperty(ConfigurationOptions.ES_WRITE_OPERATION, operation);
    // 8.x+ write resources are typeless (index only); earlier ones are index/type.
    baseSettings.setResourceWrite(version.onOrAfter(EsMajorVersion.V_8_X) ? "foo" : "foo/bar");

    if (isUpdateOp()) {
        baseSettings.setProperty(ConfigurationOptions.ES_MAPPING_ID, "<2>");
    }
    return baseSettings;
}
 
Example #16
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpdateOnlyScript() throws Exception {
    JobConf jobConf = createJobConf();
    // use an existing id to allow the update to succeed
    jobConf.set(ConfigurationOptions.ES_RESOURCE, resource("mroldapi-createwithid", "data", clusterInfo.getMajorVersion()));
    jobConf.set(ConfigurationOptions.ES_WRITE_OPERATION, "update");
    jobConf.set(ConfigurationOptions.ES_MAPPING_ID, "number");
    jobConf.set(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "yes");
    jobConf.set(ConfigurationOptions.ES_UPDATE_RETRY_ON_CONFLICT, "3");

    // 5.x+ clusters take a Painless script; older ones take Groovy.
    boolean painless = clusterInfo.getMajorVersion().onOrAfter(EsMajorVersion.V_5_X);
    jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_INLINE, painless ? "int counter = 3" : "counter = 3");
    jobConf.set(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, painless ? "painless" : "groovy");

    runJob(jobConf);
}
 
Example #17
Source File: ScrollReaderTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test(expected = EsHadoopParsingException.class)
public void testScrollWithBreakOnInvalidMapping() throws IOException {
    // With no error handler configured, a value that cannot be parsed under
    // the mapping must abort the scroll with an EsHadoopParsingException.
    MappingSet mappings = getMappingSet("numbers-as-strings");

    Settings testSettings = new TestSettings();
    testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA, "" + readMetadata);
    testSettings.setProperty(ConfigurationOptions.ES_READ_METADATA_FIELD, "" + metadataField);
    testSettings.setProperty(ConfigurationOptions.ES_OUTPUT_JSON, "" + readAsJson);

    JdkValueReader valueReader = ObjectUtils.instantiate(JdkValueReader.class.getName(), testSettings);
    ScrollReader reader = new ScrollReader(ScrollReaderConfigBuilder.builder(valueReader, mappings.getResolvedView(), testSettings));

    // Fix: close the scroll fixture stream; the original leaked it on both the
    // expected (throwing) and unexpected (non-throwing) paths.
    try (InputStream stream = getClass().getResourceAsStream(scrollData("numbers-as-strings"))) {
        reader.read(stream);
    }
    fail("Should not be able to parse string as long");
}
 
Example #18
Source File: CommandTest.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
@Test
public void testUpdateOnlyInlineScript6X() throws Exception {
    // Update-only, exactly 6.x: the bulk entry serializes the script in the
    // structured {"script":{"source":...,"lang":...}} form.
    assumeTrue(ConfigurationOptions.ES_OPERATION_UPDATE.equals(operation));
    assumeTrue(version.on(EsMajorVersion.V_6_X));

    Settings scriptSettings = settings();
    scriptSettings.setProperty(ConfigurationOptions.ES_INDEX_AUTO_CREATE, "yes");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_RETRY_ON_CONFLICT, "3");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_INLINE, "counter = 3");
    scriptSettings.setProperty(ConfigurationOptions.ES_UPDATE_SCRIPT_LANG, "groovy");

    create(scriptSettings).write(data).copyTo(ba);

    String expected = "{\"" + operation + "\":{\"_id\":2,\"_retry_on_conflict\":3}}\n"
            + "{\"script\":{\"source\":\"counter = 3\",\"lang\":\"groovy\"}}\n";
    assertEquals(expected, ba.toString());
}
 
Example #19
Source File: InitializationUtils.java    From elasticsearch-hadoop with Apache License 2.0 6 votes vote down vote up
/**
 * Installs {@code clazz} as the value writer when none is configured.
 * When the input is already JSON, a no-op writer is installed instead to
 * bypass serialization. Returns true only when a writer was actually set.
 */
public static boolean setValueWriterIfNotSet(Settings settings, Class<? extends ValueWriter<?>> clazz, Log log) {
    // Respect an explicitly configured writer.
    if (StringUtils.hasText(settings.getSerializerValueWriterClassName())) {
        return false;
    }

    Log logger = (log == null ? LogFactory.getLog(clazz) : log);
    String writerClassName = clazz.getName();

    if (settings.getInputAsJson()) {
        writerClassName = NoOpValueWriter.class.getName();
        if (logger.isDebugEnabled()) {
            logger.debug(String.format("Elasticsearch input marked as JSON; bypassing serialization through [%s] instead of [%s]", writerClassName, clazz));
        }
    }

    settings.setProperty(ConfigurationOptions.ES_SERIALIZATION_WRITER_VALUE_CLASS, writerClassName);
    if (logger.isDebugEnabled()) {
        logger.debug(String.format("Using pre-defined writer serializer [%s] as default", settings.getSerializerValueWriterClassName()));
    }
    return true;
}
 
Example #20
Source File: AbstractMRNewApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test(expected = EsHadoopIllegalArgumentException.class)
public void testUpdateWithoutId() throws Exception {
    // An update operation without es.mapping.id must be rejected.
    Configuration updateConf = createConf();
    updateConf.set(ConfigurationOptions.ES_RESOURCE, resource("mrnewapi-update", "data", clusterInfo.getMajorVersion()));
    updateConf.set(ConfigurationOptions.ES_WRITE_OPERATION, "update");

    runJob(updateConf);
}
 
Example #21
Source File: CommandTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testAll() throws Exception {
    // Exercises id, routing and TTL metadata together; the TTL field is only
    // emitted for 6.x and earlier (see the assumption guard).
    assumeTrue(version.onOrBefore(EsMajorVersion.V_6_X));
    assumeFalse(isDeleteOP() && jsonInput);

    Settings allMetaSettings = settings();
    allMetaSettings.setProperty(ConfigurationOptions.ES_MAPPING_ID, "n");
    allMetaSettings.setProperty(ConfigurationOptions.ES_MAPPING_TTL, "<2>");
    allMetaSettings.setProperty(ConfigurationOptions.ES_MAPPING_ROUTING, "s");

    create(allMetaSettings).write(data).copyTo(ba);

    String expected = "{\"" + operation + "\":{\"_id\":1,\"_routing\":\"v\",\"_ttl\":2}}" + map();
    assertEquals(expected, ba.toString());
}
 
Example #22
Source File: CommandTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Produces every (operation, json-input, version) combination for the
 * parameterized test run. Iteration order: version, then json flag, then
 * operation — matching the original fixture order.
 */
@Parameters
public static Collection<Object[]> data() {
    // Guard: fail loudly when a new major version lands without this test
    // being extended to cover it.
    if (EsMajorVersion.LATEST != EsMajorVersion.V_8_X) {
        throw new IllegalStateException("CommandTest needs new version updates.");
    }

    String[] operations = {
            ConfigurationOptions.ES_OPERATION_INDEX,
            ConfigurationOptions.ES_OPERATION_CREATE,
            ConfigurationOptions.ES_OPERATION_UPDATE,
            ConfigurationOptions.ES_OPERATION_DELETE
    };
    boolean[] jsonFlags = { false, true };
    EsMajorVersion[] versions = {
            EsMajorVersion.V_1_X,
            EsMajorVersion.V_2_X,
            EsMajorVersion.V_5_X,
            EsMajorVersion.V_6_X,
            EsMajorVersion.V_7_X,
            EsMajorVersion.V_8_X
    };

    Collection<Object[]> combinations = new ArrayList<>();
    for (EsMajorVersion esVersion : versions) {
        for (boolean jsonFlag : jsonFlags) {
            for (String op : operations) {
                combinations.add(new Object[] { op, jsonFlag, esVersion });
            }
        }
    }
    return combinations;
}
 
Example #23
Source File: AbstractMROldApiSearchTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testBasicSearch() throws Exception {
    // Read back the index written by the corresponding save test.
    JobConf searchConf = createJobConf();
    searchConf.set(ConfigurationOptions.ES_RESOURCE, resource(indexPrefix + "mroldapi-save", "data", clusterInfo.getMajorVersion()));

    JobClient.runJob(searchConf);
}
 
Example #24
Source File: AbstractPigSaveJsonTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testUpdateWithId() throws Exception {
    // Pig STORE with upsert semantics: es.mapping.id extracts the document id
    // from the "number" field, and es.input.json marks the tuples as raw JSON.
    String script =
            "REGISTER "+ Provisioner.ESHADOOP_TESTING_JAR + ";" +
            loadSource() +
            "STORE A INTO '"+resource("json-pig-update", "data", VERSION)+"' USING org.elasticsearch.hadoop.pig.EsStorage('"
                            + ConfigurationOptions.ES_WRITE_OPERATION + "=upsert','"
                            + ConfigurationOptions.ES_MAPPING_ID + "=number',"
                            + "'es.input.json=true');";
    pig.executeScript(script);
}
 
Example #25
Source File: AbstractMROldApiSearchTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testSearchNonExistingIndex() throws Exception {
    // Reading a missing index must produce an empty result set, not an error,
    // when es.index.read.missing.as.empty is enabled.
    JobConf searchConf = createJobConf();
    searchConf.set(ConfigurationOptions.ES_RESOURCE, resource("foobar", "save", clusterInfo.getMajorVersion()));
    searchConf.setBoolean(ConfigurationOptions.ES_INDEX_READ_MISSING_AS_EMPTY, true);

    JobClient.runJob(searchConf);
}
 
Example #26
Source File: AbstractMRNewApiSearchTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testBasicWildSearch() throws Exception {
    // Search across every index matching the wildcard resource pattern.
    Configuration wildConf = createConf();
    wildConf.set(ConfigurationOptions.ES_RESOURCE, resource(indexPrefix + "mrnew*-save", "data", clusterInfo.getMajorVersion()));

    new Job(wildConf).waitForCompletion(true);
}
 
Example #27
Source File: AbstractMRNewApiSearchTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testBasicSearch() throws Exception {
    // Read back the index written by the corresponding save test.
    Configuration searchConf = createConf();
    searchConf.set(ConfigurationOptions.ES_RESOURCE, resource(indexPrefix + "mrnewapi-save", "data", clusterInfo.getMajorVersion()));

    new Job(searchConf).waitForCompletion(true);
}
 
Example #28
Source File: SettingsUtils.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Parses the read-field-as-array include setting into numbered filters.
 * A malformed value is re-thrown with the option name and the raw value
 * attached for easier diagnosis.
 */
public static List<NumberedInclude> getFieldArrayFilterInclude(Settings settings) {
    String includeString = settings.getReadFieldAsArrayInclude();
    List<String> includes = StringUtils.tokenize(includeString);
    try {
        return FieldFilter.toNumberedFilter(includes);
    } catch (EsHadoopIllegalArgumentException iae) {
        String message = "Failed to parse [" + ConfigurationOptions.ES_READ_FIELD_AS_ARRAY_INCLUDE
                + "] option with value of [" + includeString + "]";
        throw new EsHadoopIllegalArgumentException(message, iae);
    }
}
 
Example #29
Source File: InitializationUtils.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Installs {@code clazz} as the security user provider when none is
 * configured. Returns true only when the provider was actually set.
 */
public static boolean setUserProviderIfNotSet(Settings settings, Class<? extends UserProvider> clazz, Log log) {
    // Respect an explicitly configured provider.
    if (StringUtils.hasText(settings.getSecurityUserProviderClass())) {
        return false;
    }

    settings.setProperty(ConfigurationOptions.ES_SECURITY_USER_PROVIDER_CLASS, clazz.getName());

    Log logger = (log == null ? LogFactory.getLog(clazz) : log);
    if (logger.isDebugEnabled()) {
        logger.debug(String.format("Using pre-defined user provider [%s] as default", settings.getSecurityUserProviderClass()));
    }
    return true;
}
 
Example #30
Source File: AbstractMROldApiSaveTest.java    From elasticsearch-hadoop with Apache License 2.0 5 votes vote down vote up
@Test
public void testIndexPatternWithFormattingAndId() throws Exception {
    // Index name is derived per document from its @timestamp field (formatted
    // as YYYY-MM-dd), while the document id comes from the "number" field.
    JobConf patternConf = createJobConf();
    patternConf.set(ConfigurationOptions.ES_MAPPING_ID, "number");
    patternConf.set(ConfigurationOptions.ES_RESOURCE, resource("mroldapi-pattern-format-{@timestamp|YYYY-MM-dd}-with-id", "data", clusterInfo.getMajorVersion()));

    runJob(patternConf);
}