org.elasticsearch.test.ESTestCase Java Examples

The following examples show how to use org.elasticsearch.test.ESTestCase. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: IcuTransformFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTransformCyrillicLatinReverse() throws Exception {
    // Latin transliteration should map back to Cyrillic through the reverse transform.
    final String resource = "icu_transform.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer cyrTokenizer = analysis.tokenizer.get("my_icu_tokenizer_cyr").create();
    cyrTokenizer.setReader(new StringReader("Rossijskaâ Federaciâ"));
    final TokenStream transformed = analysis.tokenFilter.get("my_icu_transformer_cyr_reverse").create(cyrTokenizer);
    assertTokenStreamContents(transformed, new String[] { "Российская", "Федерация"});
}
 
Example #2
Source File: DecompoundTokenFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testWithSubwordsOnly() throws Exception {
    // The decompounder should emit only decompounded parts for compounds like "Schlüsselwort".
    final String resource = "keywords_analysis.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY), new CommonAnalysisPlugin());
    final Analyzer analyzer = analysis.indexAnalyzers.get("with_subwords_only");
    assertNotNull(analyzer);
    final String input = "Das ist ein Schlüsselwort, ein Bindestrichwort";
    final String[] subwords = {
            "Da",
            "ist",
            "ein",
            "Schlüssel",
            "wort",
            "ein",
            "Bindestrich",
            "wort"
    };
    assertTokenStreamContents(analyzer.tokenStream("test-field", input), subwords);
}
 
Example #3
Source File: WordDelimiterFilter2Tests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testOffsetChange() throws Exception {
    // The word delimiter must keep start/end offsets intact for a non-ASCII keyword token.
    final String resource = "worddelimiter.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer keywordTokenizer = analysis.tokenizer.get("keyword").create();
    keywordTokenizer.setReader(new StringReader("übelkeit"));
    final TokenStream filtered = analysis.tokenFilter.get("wd").create(keywordTokenizer);
    // Single token covering the entire input: offsets [0, 8).
    assertTokenStreamContents(filtered,
            new String[]{"übelkeit" },
            new int[]{0},
            new int[]{8});
}
 
Example #4
Source File: UnstemmedGermanNormalizationTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testFour() throws Exception {
    // The unstemmed analyzer lowercases and folds umlauts (Jörg -> jorg).
    final String resource = "unstemmed.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY), new CommonAnalysisPlugin());
    final Analyzer analyzer = analysis.indexAnalyzers.get("unstemmed");
    final String input = "Prante, Jörg";
    final String[] expectedTokens = {
            "prante",
            "jorg"
    };
    assertTokenStreamContents(analyzer.tokenStream("test", new StringReader(input)), expectedTokens);
}
 
Example #5
Source File: UnstemmedGermanNormalizationTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testSix() throws Exception {
    // The default analyzer keeps "c++" intact while emitting both unstemmed and stemmed forms.
    final String resource = "unstemmed.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY), new CommonAnalysisPlugin());
    final Analyzer analyzer = analysis.indexAnalyzers.get("default");
    final String input = "Programmieren in C++ für Einsteiger";
    final String[] expectedTokens = {
            "programmieren",
            "programmi",
            "c++",
            "einsteiger",
            "einsteig"
    };
    assertTokenStreamContents(analyzer.tokenStream(null, new StringReader(input)), expectedTokens);
}
 
Example #6
Source File: IcuCollationAnalyzerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testBasicUsage() throws Exception {
    // Turkish primary-strength collation must produce identical sort keys for
    // the dotted/dotless-I case pairs of the same phrase.
    Settings settings = Settings.builder()
            .put("index.analysis.analyzer.myAnalyzer.type", "icu_collation")
            .put("index.analysis.analyzer.myAnalyzer.language", "tr")
            .put("index.analysis.analyzer.myAnalyzer.strength", "primary")
            .put("index.analysis.analyzer.myAnalyzer.decomposition", "canonical")
            .build();
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    Analyzer analyzer = analysis.indexAnalyzers.get("myAnalyzer");
    TokenStream tsUpper = analyzer.tokenStream(null, "I WİLL USE TURKİSH CASING");
    BytesRef b1 = bytesFromTokenStream(tsUpper);
    TokenStream tsLower = analyzer.tokenStream(null, "ı will use turkish casıng");
    BytesRef b2 = bytesFromTokenStream(tsLower);
    // assertEquals reports the actual comparison result on failure,
    // unlike the previous assertTrue(compare(...) == 0).
    assertEquals(0, compare(b1.bytes, b2.bytes));
}
 
Example #7
Source File: IcuCollationAnalyzerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testNormalization() throws Exception {
    // Canonical decomposition: "I" + combining dot above (\u0049\u0307) must collate
    // the same as the precomposed lowercase forms under Turkish primary strength.
    Settings settings = Settings.builder()
            .put("index.analysis.analyzer.myAnalyzer.type", "icu_collation")
            .put("index.analysis.analyzer.myAnalyzer.language", "tr")
            .put("index.analysis.analyzer.myAnalyzer.strength", "primary")
            .put("index.analysis.analyzer.myAnalyzer.decomposition", "canonical")
            .build();
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    Analyzer analyzer = analysis.indexAnalyzers.get("myAnalyzer");
    TokenStream tsUpper = analyzer.tokenStream(null, "I W\u0049\u0307LL USE TURKİSH CASING");
    BytesRef b1 = bytesFromTokenStream(tsUpper);
    TokenStream tsLower = analyzer.tokenStream(null, "ı will use turkish casıng");
    BytesRef b2 = bytesFromTokenStream(tsLower);
    // assertEquals reports the actual comparison result on failure,
    // unlike the previous assertTrue(compare(...) == 0).
    assertEquals(0, compare(b1.bytes, b2.bytes));
}
 
Example #8
Source File: IcuCollationAnalyzerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testSecondaryStrength() throws Exception {
    // Secondary strength ignores case: "TESTING" and "testing" must yield equal sort keys.
    Settings settings = Settings.builder()
            .put("index.analysis.analyzer.myAnalyzer.type", "icu_collation")
            .put("index.analysis.analyzer.myAnalyzer.language", "en")
            .put("index.analysis.analyzer.myAnalyzer.strength", "secondary")
            .put("index.analysis.analyzer.myAnalyzer.decomposition", "no")
            .build();
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    Analyzer analyzer = analysis.indexAnalyzers.get("myAnalyzer");
    TokenStream tsUpper = analyzer.tokenStream("content", "TESTING");
    BytesRef b1 = bytesFromTokenStream(tsUpper);
    TokenStream tsLower = analyzer.tokenStream("content", "testing");
    BytesRef b2 = bytesFromTokenStream(tsLower);
    // assertEquals reports the actual comparison result on failure,
    // unlike the previous assertTrue(compare(...) == 0).
    assertEquals(0, compare(b1.bytes, b2.bytes));
}
 
Example #9
Source File: UnstemmedGermanNormalizationTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testThree() throws Exception {
    // An ISBN should be kept verbatim and also emitted in a hyphen-free variant.
    final String resource = "unstemmed.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY), new CommonAnalysisPlugin());
    final Analyzer analyzer = analysis.indexAnalyzers.get("default");
    final String input = "978-1-4493-5854-9";
    final String[] expectedTokens = {
         "978-1-4493-5854-9",
         "9781449358549"
    };
    assertTokenStreamContents(analyzer.tokenStream(null, new StringReader(input)), expectedTokens);
}
 
Example #10
Source File: IcuCollationAnalyzerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testUpperCaseFirst() throws Exception {
    // With caseFirst=upper, the sort key of "Resume" must order before that of "resume".
    final Settings settings = Settings.builder()
            .put("index.analysis.analyzer.myAnalyzer.type", "icu_collation")
            .put("index.analysis.analyzer.myAnalyzer.language", "en")
            .put("index.analysis.analyzer.myAnalyzer.strength", "tertiary")
            .put("index.analysis.analyzer.myAnalyzer.caseFirst", "upper")
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Analyzer analyzer = analysis.indexAnalyzers.get("myAnalyzer");
    final BytesRef lowerKey = bytesFromTokenStream(analyzer.tokenStream(null, "resume"));
    final BytesRef upperKey = bytesFromTokenStream(analyzer.tokenStream(null, "Resume"));
    assertTrue(compare(upperKey.bytes, lowerKey.bytes) < 0);
}
 
Example #11
Source File: IcuNormalizeCharTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testNormalize() throws Exception {
    // The ICU normalizer char filter lowercases and strips diacritics ("Jörg Prante" -> "jorg prante").
    String source = "Jörg Prante";
    String resource = "icu_normalize.json";
    Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    StringBuilder sb = new StringBuilder();
    // Fix: close the reader when done — it was previously leaked.
    try (Reader charFilter = analysis.charFilter.get("my_icu_normalizer").create(new StringReader(source))) {
        int ch;
        while ((ch = charFilter.read()) != -1) {
            sb.append((char) ch);
        }
    }
    assertEquals("jorg prante", sb.toString());
}
 
Example #12
Source File: IcuFoldingFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testFoldingCharFilter() throws Exception {
    // The ICU folding char filter lowercases and folds diacritics ("Jörg Prante" -> "jorg prante").
    String source = "Jörg Prante";
    String resource = "icu_folding.json";
    Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    StringBuilder sb = new StringBuilder();
    // Fix: close the reader when done — it was previously leaked.
    try (Reader charFilter = analysis.charFilter.get("my_icu_folder").create(new StringReader(source))) {
        int ch;
        while ((ch = charFilter.read()) != -1) {
            sb.append((char) ch);
        }
    }
    assertEquals("jorg prante", sb.toString());
}
 
Example #13
Source File: IcuTransformFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTransformHanLatin() throws Exception {
    // Han characters should be transliterated to pinyin with tone marks.
    final String resource = "icu_transform.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer hanTokenizer = analysis.tokenizer.get("my_icu_tokenizer_han").create();
    hanTokenizer.setReader(new StringReader("中国"));
    final TokenStream transformed = analysis.tokenFilter.get("my_icu_transformer_han").create(hanTokenizer);
    assertTokenStreamContents(transformed, new String[] { "zhōng guó" });
}
 
Example #14
Source File: IcuTransformFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTransformKatakanaHiragana() throws Exception {
    // Katakana input should be converted to its hiragana equivalent.
    final String resource = "icu_transform.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer katakanaTokenizer = analysis.tokenizer.get("my_icu_tokenizer_katakana").create();
    katakanaTokenizer.setReader(new StringReader("ヒラガナ"));
    final TokenStream transformed = analysis.tokenFilter.get("my_icu_transformer_katakana").create(katakanaTokenizer);
    assertTokenStreamContents(transformed, new String[] { "ひらがな" });
}
 
Example #15
Source File: IcuTransformFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTransformCyrillicLatin() throws Exception {
    // Cyrillic input should be transliterated to its Latin form.
    final String resource = "icu_transform.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer cyrTokenizer = analysis.tokenizer.get("my_icu_tokenizer_cyr").create();
    cyrTokenizer.setReader(new StringReader("Российская Федерация"));
    final TokenStream transformed = analysis.tokenFilter.get("my_icu_transformer_cyr").create(cyrTokenizer);
    assertTokenStreamContents(transformed, new String[] { "Rossijskaâ", "Federaciâ" });
}
 
Example #16
Source File: SymbolnameTokenFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testSimple() throws Exception {
    // The symbolname filter expands "C++" into symbol-name variants alongside the original token.
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            Settings.EMPTY,
            new BundlePlugin(Settings.EMPTY));
    Tokenizer whitespaceTokenizer = analysis.tokenizer.get("whitespace").create();
    whitespaceTokenizer.setReader(new StringReader("Programmieren mit C++"));
    TokenStream filtered = analysis.tokenFilter.get("symbolname").create(whitespaceTokenizer);
    assertTokenStreamContents(filtered, new String[] {
            "Programmieren",
            "mit",
            "C++",
            "C __PLUSSIGN__ __PLUSSIGN__",
            "C",
            "__PLUSSIGN__",
            "__PLUSSIGN__"
    });
}
 
Example #17
Source File: MockBigArrays.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that every big array acquired during the test run has been released.
 * Takes a snapshot of {@code ACQUIRED_ARRAYS}, waits for the snapshot's entries to
 * disappear from the live map, and throws a {@link RuntimeException} listing the
 * leaked arrays if any remain.
 *
 * @throws Exception if waiting is interrupted, or (as RuntimeException) if arrays leaked
 */
public static void ensureAllArraysAreReleased() throws Exception {
    // Snapshot so concurrent acquire/release activity doesn't move the goalposts.
    final Map<Object, Object> masterCopy = new HashMap<>(ACQUIRED_ARRAYS);
    if (!masterCopy.isEmpty()) {
        // not empty, we might be executing on a shared cluster that keeps on obtaining
        // and releasing arrays, lets make sure that after a reasonable timeout, all master
        // copy (snapshot) have been released
        boolean success = ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_ARRAYS.keySet()));
        if (!success) {
            // Keep only the entries that are still acquired — those are the leaks.
            masterCopy.keySet().retainAll(ACQUIRED_ARRAYS.keySet());
            ACQUIRED_ARRAYS.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
            if (!masterCopy.isEmpty()) {
                // Map values appear to carry acquisition context; Throwable values are
                // attached as cause/suppressed — TODO confirm against ACQUIRED_ARRAYS producer.
                Iterator<Object> causes = masterCopy.values().iterator();
                Object firstCause = causes.next();
                RuntimeException exception = new RuntimeException(masterCopy.size() + " arrays have not been released",
                        firstCause instanceof Throwable ? (Throwable) firstCause : null);
                while (causes.hasNext()) {
                    Object cause = causes.next();
                    if (cause instanceof Throwable) {
                        exception.addSuppressed((Throwable) cause);
                    }
                }
                throw exception;
            }
        }
    }
}
 
Example #18
Source File: IcuTransformFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTransformNFD() throws Exception {
    // The NFD transform should strip accents from decomposed Greek-derived text.
    final String resource = "icu_transform.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer nfdTokenizer = analysis.tokenizer.get("my_icu_tokenizer_nfd").create();
    nfdTokenizer.setReader(new StringReader("Alphabētikós Katálogos"));
    final TokenStream transformed = analysis.tokenFilter.get("my_icu_transformer_nfd").create(nfdTokenizer);
    assertTokenStreamContents(transformed, new String[] { "Alphabetikos", "Katalogos" });
}
 
Example #19
Source File: TestShardRouting.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Picks a recovery source appropriate for the given routing state: a random store
 * recovery for unassigned/initializing primaries, peer recovery for replicas,
 * and {@code null} for started/relocating shards.
 */
private static RecoverySource buildRecoveryTarget(boolean primary, ShardRoutingState state) {
    if (state == ShardRoutingState.UNASSIGNED || state == ShardRoutingState.INITIALIZING) {
        if (!primary) {
            return RecoverySource.PeerRecoverySource.INSTANCE;
        }
        return ESTestCase.randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE,
            RecoverySource.ExistingStoreRecoverySource.INSTANCE);
    }
    if (state == ShardRoutingState.STARTED || state == ShardRoutingState.RELOCATING) {
        return null;
    }
    throw new IllegalStateException("illegal state");
}
 
Example #20
Source File: BaseformTokenFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTwo() throws Exception {
    // The baseform filter emits each surface token followed by its lemma.
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            Settings.EMPTY,
            new BundlePlugin(Settings.EMPTY));
    Tokenizer standardTokenizer = analysis.tokenizer.get("standard").create();
    standardTokenizer.setReader(new StringReader("Das sind Autos, die Nudeln transportieren."));
    TokenStream filtered = analysis.tokenFilter.get("baseform").create(standardTokenizer);
    assertTokenStreamContents(filtered, new String[] {
            "Das",
            "Das",
            "sind",
            "sind",
            "Autos",
            "Auto",
            "die",
            "der",
            "Nudeln",
            "Nudel",
            "transportieren",
            "transportieren"
    });
}
 
Example #21
Source File: BaseformTokenFilterTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testThree() throws Exception {
    // The baseform filter emits each surface token followed by its lemma
    // (identical pairs when no distinct base form exists).
    ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            Settings.EMPTY,
            new BundlePlugin(Settings.EMPTY));
    Tokenizer standardTokenizer = analysis.tokenizer.get("standard").create();
    standardTokenizer.setReader(new StringReader("wurde zum tollen gemacht"));
    TokenStream filtered = analysis.tokenFilter.get("baseform").create(standardTokenizer);
    assertTokenStreamContents(filtered, new String[] {
            "wurde",
            "werden",
            "zum",
            "zum",
            "tollen",
            "tollen",
            "gemacht",
            "machen"
    });
}
 
Example #22
Source File: HyphenTokenizerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testOne() throws Exception {
    // A hyphenated compound yields the original token plus joined and split variants.
    final String resource = "hyphen_tokenizer.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer hyphenTokenizer = analysis.tokenizer.get("my_hyphen_tokenizer").create();
    hyphenTokenizer.setReader(new StringReader("Das ist ein Bindestrich-Wort."));
    final TokenStream filtered = analysis.tokenFilter.get("hyphen").create(hyphenTokenizer);
    assertTokenStreamContents(filtered, new String[] {
            "Das",
            "ist",
            "ein",
            "Bindestrich-Wort",
            "BindestrichWort",
            "Wort",
            "Bindestrich"
    });
}
 
Example #23
Source File: HyphenTokenizerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testTwo() throws Exception {
    // "E-Book" expands into the original, joined, and tail variants; other words pass through.
    final String resource = "hyphen_tokenizer.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer icuTokenizer = analysis.tokenizer.get("my_icu_tokenizer").create();
    icuTokenizer.setReader(new StringReader("Das E-Book muss dringend zum Buchbinder."));
    final TokenStream filtered = analysis.tokenFilter.get("hyphen").create(icuTokenizer);
    assertTokenStreamContents(filtered, new String[] {
            "Das",
            "E-Book",
            "EBook",
            "Book",
            "muss",
            "dringend",
            "zum",
            "Buchbinder"
    });
}
 
Example #24
Source File: HyphenTokenizerTests.java    From elasticsearch-plugin-bundle with GNU Affero General Public License v3.0 6 votes vote down vote up
public void testSix() throws Exception {
    // A lone hyphenated token expands into the original, joined, and tail variants.
    final String resource = "hyphen_tokenizer.json";
    final Settings settings = Settings.builder()
            .loadFromStream(resource, getClass().getResourceAsStream(resource), true)
            .build();
    final ESTestCase.TestAnalysis analysis = ESTestCase.createTestAnalysis(new Index("test", "_na_"),
            settings,
            new BundlePlugin(Settings.EMPTY));
    final Tokenizer hyphenTokenizer = analysis.tokenizer.get("my_hyphen_tokenizer").create();
    hyphenTokenizer.setReader(new StringReader("E-Book"));
    final TokenStream filtered = analysis.tokenFilter.get("hyphen").create(hyphenTokenizer);
    assertTokenStreamContents(filtered, new String[] {
            "E-Book",
            "EBook",
            "Book"
    });
}
 
Example #25
Source File: MockPageCacheRecycler.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that every page acquired through the recycler during the test run has been
 * released. Takes a snapshot of {@code ACQUIRED_PAGES}, waits for the snapshot's
 * entries to disappear from the live map, and throws a {@link RuntimeException}
 * listing the leaked pages if any remain.
 *
 * @throws Exception if waiting is interrupted, or (as RuntimeException) if pages leaked
 */
public static void ensureAllPagesAreReleased() throws Exception {
    // Snapshot so concurrent acquire/release activity doesn't move the goalposts.
    final Map<Object, Throwable> masterCopy = new HashMap<>(ACQUIRED_PAGES);
    if (!masterCopy.isEmpty()) {
        // not empty, we might be executing on a shared cluster that keeps on obtaining
        // and releasing pages, lets make sure that after a reasonable timeout, all master
        // copy (snapshot) have been released
        boolean success =
                ESTestCase.awaitBusy(() -> Sets.haveEmptyIntersection(masterCopy.keySet(), ACQUIRED_PAGES.keySet()));
        if (!success) {
            // Keep only the entries that are still acquired — those are the leaks.
            masterCopy.keySet().retainAll(ACQUIRED_PAGES.keySet());
            ACQUIRED_PAGES.keySet().removeAll(masterCopy.keySet()); // remove all existing master copy we will report on
            if (!masterCopy.isEmpty()) {
                // Map values are Throwables (presumably acquisition stack traces — TODO confirm);
                // chain them so the report shows where each leaked page was obtained.
                Iterator<Throwable> causes = masterCopy.values().iterator();
                Throwable firstCause = causes.next();
                RuntimeException exception = new RuntimeException(masterCopy.size() + " pages have not been released", firstCause);
                while (causes.hasNext()) {
                    exception.addSuppressed(causes.next());
                }
                throw exception;
            }
        }
    }
}
 
Example #26
Source File: LemmagenAnalysisTest.java    From elasticsearch-analysis-lemmagen with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a test analysis environment with the Czech lemmagen lexicon copied into a
 * temporary config directory, so the plugin can resolve "cs.lem" at analysis time.
 *
 * @return the test analysis built from lemmagen.json with the lemmagen plugin
 * @throws IOException if the lexicon cannot be copied or settings cannot be loaded
 */
public ESTestCase.TestAnalysis createAnalysis() throws IOException {
  Path home = createTempDir();
  Path config = home.resolve("config" + "/" + LemmagenFilterFactory.DEFAULT_DIRECTORY);
  Files.createDirectories(config);

  // Fix: close the lexicon stream — Files.copy(InputStream, Path) does not close its source.
  try (InputStream lexicon =
      LemmagenAnalysisTest.class.getResourceAsStream("/org/elasticsearch/index/analysis/cs.lem")) {
    Files.copy(lexicon, config.resolve("cs.lem"));
  }

  String path = "/org/elasticsearch/index/analysis/lemmagen.json";

  Settings settings = Settings.builder().loadFromStream(path, getClass().getResourceAsStream(path), false)
      .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT).put(Environment.PATH_HOME_SETTING.getKey(), home)
      .build();

  return AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new AnalysisLemmagenPlugin());
}
 
Example #27
Source File: LocalCheckpointTrackerTests.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Marks a full contiguous range [0, maxOps) as processed in shuffled order and
 * verifies the tracker's checkpoint advances to maxOps - 1, including the case where
 * maxOps is not aligned to BIT_SET_SIZE (so one partially-filled bit set remains).
 */
public void testSimpleOverFlow() {
    List<Long> seqNoList = new ArrayList<>();
    final boolean aligned = randomBoolean();
    // Aligned: maxOps is an exact multiple of BIT_SET_SIZE; otherwise add a partial tail.
    final int maxOps = BIT_SET_SIZE * randomIntBetween(1, 5) + (aligned ? 0 : randomIntBetween(1, BIT_SET_SIZE - 1));

    for (long i = 0; i < maxOps; i++) {
        seqNoList.add(i);
    }
    // Process in random order — the checkpoint must still end up at maxOps - 1.
    Collections.shuffle(seqNoList, random());
    for (Long seqNo : seqNoList) {
        tracker.markSeqNoAsProcessed(seqNo);
    }
    assertThat(tracker.processedCheckpoint.get(), equalTo(maxOps - 1L));
    // Fully-filled bit sets are discarded; only an unaligned tail leaves one behind.
    assertThat(tracker.processedSeqNo.size(), equalTo(aligned ? 0 : 1));
    if (aligned == false) {
        assertThat(tracker.processedSeqNo.keys().iterator().next().value, equalTo(tracker.processedCheckpoint.get() / BIT_SET_SIZE));
    }
    assertThat(tracker.hasProcessed(randomFrom(seqNoList)), equalTo(true));
    // A seq no outside the processed range must not be reported as processed.
    final long notCompletedSeqNo = randomValueOtherThanMany(seqNoList::contains, ESTestCase::randomNonNegativeLong);
    assertThat(tracker.hasProcessed(notCompletedSeqNo), equalTo(false));
}
 
Example #28
Source File: AnalysisTestsHelper.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Builds an {@link ESTestCase.TestAnalysis} from the given settings and plugins,
 * defaulting the index-created version to {@code Version.CURRENT} when absent.
 */
public static ESTestCase.TestAnalysis createTestAnalysisFromSettings(
        final Settings settings,
        final Path configPath,
        final AnalysisPlugin... plugins) throws IOException {
    // The analysis registry needs a created-version setting; supply one if missing.
    final Settings actualSettings = settings.get(IndexMetaData.SETTING_VERSION_CREATED) != null
            ? settings
            : Settings.builder().put(settings).put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
    final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", actualSettings);
    final Environment environment = new Environment(actualSettings, configPath);
    final AnalysisRegistry registry =
            new AnalysisModule(environment, Arrays.asList(plugins)).getAnalysisRegistry();
    return new ESTestCase.TestAnalysis(registry.build(indexSettings),
            registry.buildTokenFilterFactories(indexSettings),
            registry.buildTokenizerFactories(indexSettings),
            registry.buildCharFilterFactories(indexSettings));
}
 
Example #29
Source File: MessagesTests.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Checks equals/hashCode consistency and wire serialization round-tripping of
 * {@code StartJoinRequest}: a serialized copy must be equal, and mutating either
 * the source node or the term must produce an unequal instance.
 */
public void testStartJoinRequestEqualsHashCodeSerialization() {
    StartJoinRequest initialStartJoinRequest = new StartJoinRequest(createNode(randomAlphaOfLength(10)), randomNonNegativeLong());
    // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type
    EqualsHashCodeTestUtils.checkEqualsAndHashCode(initialStartJoinRequest,
            (CopyFunction<StartJoinRequest>) startJoinRequest -> copyWriteable(startJoinRequest, writableRegistry(),
                    StartJoinRequest::new),
        startJoinRequest -> {
            // Mutator: randomly alter exactly one field so the result differs from the input.
            switch (randomInt(1)) {
                case 0:
                    // change sourceNode
                    return new StartJoinRequest(createNode(randomAlphaOfLength(20)), startJoinRequest.getTerm());
                case 1:
                    // change term
                    return new StartJoinRequest(startJoinRequest.getSourceNode(),
                        randomValueOtherThan(startJoinRequest.getTerm(), ESTestCase::randomNonNegativeLong));
                default:
                    throw new AssertionError();
            }
        });
}
 
Example #30
Source File: MessagesTests.java    From crate with Apache License 2.0 6 votes vote down vote up
/**
 * Checks equals/hashCode consistency and wire serialization round-tripping of
 * {@code ApplyCommitRequest}: a serialized copy must be equal, and mutating the
 * source node, term, or version must produce an unequal instance.
 */
public void testApplyCommitEqualsHashCodeSerialization() {
    ApplyCommitRequest initialApplyCommit = new ApplyCommitRequest(createNode(randomAlphaOfLength(10)), randomNonNegativeLong(),
        randomNonNegativeLong());
    // Note: the explicit cast of the CopyFunction is needed for some IDE (specifically Eclipse 4.8.0) to infer the right type
    EqualsHashCodeTestUtils.checkEqualsAndHashCode(initialApplyCommit,
            (CopyFunction<ApplyCommitRequest>) applyCommit -> copyWriteable(applyCommit, writableRegistry(), ApplyCommitRequest::new),
        applyCommit -> {
            // Mutator: randomly alter exactly one field so the result differs from the input.
            switch (randomInt(2)) {
                case 0:
                    // change sourceNode
                    return new ApplyCommitRequest(createNode(randomAlphaOfLength(20)), applyCommit.getTerm(), applyCommit.getVersion());
                case 1:
                    // change term
                    return new ApplyCommitRequest(applyCommit.getSourceNode(),
                        randomValueOtherThan(applyCommit.getTerm(), ESTestCase::randomNonNegativeLong), applyCommit.getVersion());
                case 2:
                    // change version
                    return new ApplyCommitRequest(applyCommit.getSourceNode(), applyCommit.getTerm(),
                        randomValueOtherThan(applyCommit.getVersion(), ESTestCase::randomNonNegativeLong));
                default:
                    throw new AssertionError();
            }
        });
}