org.apache.lucene.store.Directory Java Examples
The following examples show how to use
org.apache.lucene.store.Directory.
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TestAllFilesHaveCodecHeader.java From lucene-solr with Apache License 2.0 | 6 votes |
public void test() throws Exception { Directory dir = newDirectory(); IndexWriterConfig conf = newIndexWriterConfig(new MockAnalyzer(random())); conf.setCodec(TestUtil.getDefaultCodec()); RandomIndexWriter riw = new RandomIndexWriter(random(), dir, conf); // Use LineFileDocs so we (hopefully) get most Lucene features // tested, e.g. IntPoint was recently added to it: LineFileDocs docs = new LineFileDocs(random()); for (int i = 0; i < 100; i++) { riw.addDocument(docs.nextDoc()); if (random().nextInt(7) == 0) { riw.commit(); } if (random().nextInt(20) == 0) { riw.deleteDocuments(new Term("docid", Integer.toString(i))); } if (random().nextInt(15) == 0) { riw.updateNumericDocValue(new Term("docid", Integer.toString(i)), "docid_intDV", Long.valueOf(i)); } } riw.close(); checkHeaders(dir, new HashMap<String,String>()); dir.close(); }
Example #2
Source File: TestQueryParser.java From lucene-solr with Apache License 2.0 | 6 votes |
/**
 * Indexes {@code content} into a fresh directory and reports whether the
 * query {@code q} matches exactly one document.
 *
 * @param q        the query to execute
 * @param content  the text indexed into {@link #FIELD}
 * @param analyzer the analyzer used while indexing
 * @return {@code true} iff the query hits exactly one document
 * @throws IOException on index I/O failure
 */
private boolean isAHit(Query q, String content, Analyzer analyzer) throws IOException {
  Directory ramDir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), ramDir, analyzer);
  Document doc = new Document();
  // positions are required so positional queries (e.g. phrases) can match
  FieldType fieldType = new FieldType();
  fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
  fieldType.setTokenized(true);
  fieldType.setStored(true);
  Field field = new Field(FIELD, content, fieldType);
  doc.add(field);
  writer.addDocument(doc);
  writer.close();
  DirectoryReader ir = DirectoryReader.open(ramDir);
  IndexSearcher is = new IndexSearcher(ir);
  long hits = is.count(q);
  ir.close();
  ramDir.close();
  // return the comparison directly instead of the verbose if/else-with-booleans
  return hits == 1;
}
Example #3
Source File: TestIndexWriter.java From lucene-solr with Apache License 2.0 | 6 votes |
public void testHasUncommittedChangesAfterException() throws IOException { Analyzer analyzer = new MockAnalyzer(random()); Directory directory = newDirectory(); // we don't use RandomIndexWriter because it might add more docvalues than we expect !!!! IndexWriterConfig iwc = newIndexWriterConfig(analyzer); iwc.setMergePolicy(newLogMergePolicy()); IndexWriter iwriter = new IndexWriter(directory, iwc); Document doc = new Document(); doc.add(new SortedDocValuesField("dv", new BytesRef("foo!"))); doc.add(new SortedDocValuesField("dv", new BytesRef("bar!"))); expectThrows(IllegalArgumentException.class, () -> { iwriter.addDocument(doc); }); iwriter.commit(); assertFalse(iwriter.hasUncommittedChanges()); iwriter.close(); directory.close(); }
Example #4
Source File: TestReaderPool.java From lucene-solr with Apache License 2.0 | 6 votes |
/**
 * Verifies ReaderPool.drop(): a dropped SegmentCommitInfo is no longer cached,
 * and dropping it a second time is a harmless no-op.
 */
public void testDrop() throws IOException {
  Directory directory = newDirectory();
  FieldInfos.FieldNumbers fieldNumbers = buildIndex(directory);
  StandardDirectoryReader reader = (StandardDirectoryReader) DirectoryReader.open(directory);
  SegmentInfos segmentInfos = reader.segmentInfos.clone();
  // 0L: uppercase suffix — a lowercase 'l' is easily misread as the digit 1
  ReaderPool pool = new ReaderPool(directory, directory, segmentInfos, fieldNumbers, () -> 0L,
      null, null, null);
  SegmentCommitInfo commitInfo = RandomPicks.randomFrom(random(), segmentInfos.asList());
  ReadersAndUpdates readersAndUpdates = pool.get(commitInfo, true);
  // while pooled, the same instance must be handed back
  assertSame(readersAndUpdates, pool.get(commitInfo, false));
  assertTrue(pool.drop(commitInfo));
  if (random().nextBoolean()) {
    // dropping again must report "nothing to drop"
    assertFalse(pool.drop(commitInfo));
  }
  assertNull(pool.get(commitInfo, false));
  pool.release(readersAndUpdates, random().nextBoolean());
  IOUtils.close(pool, reader, directory);
}
Example #5
Source File: TestDirectoryTaxonomyWriter.java From lucene-solr with Apache License 2.0 | 6 votes |
@Test public void testBackwardsCompatibility() throws Exception { // tests that if the taxonomy index doesn't have the INDEX_EPOCH // property (supports pre-3.6 indexes), all still works. Directory dir = newDirectory(); // create an empty index first, so that DirTaxoWriter initializes indexEpoch to 1. new IndexWriter(dir, new IndexWriterConfig(null)).close(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE_OR_APPEND, NO_OP_CACHE); taxoWriter.close(); DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(dir); assertEquals(1, Integer.parseInt(taxoReader.getCommitUserData().get(DirectoryTaxonomyWriter.INDEX_EPOCH))); assertNull(TaxonomyReader.openIfChanged(taxoReader)); taxoReader.close(); dir.close(); }
Example #6
Source File: TestDirectoryReader.java From lucene-solr with Apache License 2.0 | 6 votes |
public void testIsCurrent() throws Exception { Directory d = newDirectory(); IndexWriter writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random()))); addDocumentWithFields(writer); writer.close(); // set up reader: DirectoryReader reader = DirectoryReader.open(d); assertTrue(reader.isCurrent()); // modify index by adding another document: writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.APPEND)); addDocumentWithFields(writer); writer.close(); assertFalse(reader.isCurrent()); // re-create index: writer = new IndexWriter(d, newIndexWriterConfig(new MockAnalyzer(random())) .setOpenMode(OpenMode.CREATE)); addDocumentWithFields(writer); writer.close(); assertFalse(reader.isCurrent()); reader.close(); d.close(); }
Example #7
Source File: EngineTestCase.java From crate with Apache License 2.0 | 6 votes |
/**
 * Creates an InternalEngine for the given config. When no Lucene index exists
 * yet, an empty store and a matching empty translog are bootstrapped first;
 * finally the translog is replayed into the engine.
 */
protected InternalEngine createEngine(
        @Nullable IndexWriterFactory indexWriterFactory,
        @Nullable BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier,
        @Nullable ToLongBiFunction<Engine, Engine.Operation> seqNoForOperation,
        EngineConfig config) throws IOException {
    final Store store = config.getStore();
    final Directory directory = store.directory();
    if (Lucene.indexExists(directory) == false) {
        // brand-new shard: create an empty index plus a matching empty translog
        store.createEmpty();
        final String translogUuid = Translog.createEmptyTranslog(
                config.getTranslogConfig().getTranslogPath(),
                SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
        store.associateIndexWithNewTranslog(translogUuid);
    }
    InternalEngine engine = createInternalEngine(
            indexWriterFactory, localCheckpointTrackerSupplier, seqNoForOperation, config);
    engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
    return engine;
}
Example #8
Source File: SolrCoreCheckLockOnStartupTest.java From lucene-solr with Apache License 2.0 | 6 votes |
@Test public void testSimpleLockErrorOnStartup() throws Exception { Directory directory = newFSDirectory(new File(initAndGetDataDir(), "index").toPath(), SimpleFSLockFactory.INSTANCE); //creates a new IndexWriter without releasing the lock yet IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(null)); ignoreException("locked"); try { System.setProperty("solr.tests.lockType",DirectoryFactory.LOCK_TYPE_SIMPLE); //opening a new core on the same index initCore("solrconfig-basic.xml", "schema.xml"); if (checkForCoreInitException(LockObtainFailedException.class)) return; fail("Expected " + LockObtainFailedException.class.getSimpleName()); } finally { System.clearProperty("solr.tests.lockType"); unIgnoreException("locked"); indexWriter.close(); directory.close(); deleteCore(); } }
Example #9
Source File: NodeEnvironment.java From crate with Apache License 2.0 | 6 votes |
/** * Acquires, then releases, all {@code write.lock} files in the given * shard paths. The "write.lock" file is assumed to be under the shard * path's "index" directory as used by Elasticsearch. * * @throws LockObtainFailedException if any of the locks could not be acquired */ public static void acquireFSLockForPaths(IndexSettings indexSettings, Path... shardPaths) throws IOException { Lock[] locks = new Lock[shardPaths.length]; Directory[] dirs = new Directory[shardPaths.length]; try { for (int i = 0; i < shardPaths.length; i++) { // resolve the directory the shard actually lives in Path p = shardPaths[i].resolve("index"); // open a directory (will be immediately closed) on the shard's location dirs[i] = new SimpleFSDirectory(p, indexSettings.getValue(FsDirectoryService.INDEX_LOCK_FACTOR_SETTING)); // create a lock for the "write.lock" file try { locks[i] = dirs[i].obtainLock(IndexWriter.WRITE_LOCK_NAME); } catch (IOException ex) { throw new LockObtainFailedException("unable to acquire " + IndexWriter.WRITE_LOCK_NAME + " for " + p, ex); } } } finally { IOUtils.closeWhileHandlingException(locks); IOUtils.closeWhileHandlingException(dirs); } }
Example #10
Source File: LuceneSearch.java From mysiteforme with Apache License 2.0 | 6 votes |
/** * 根据ID更新搜索内容 * @param blogArticle * @throws IOException */ public static void updateIndexById(BlogArticle blogArticle) throws IOException{ Directory directory = FSDirectory.open(Paths.get(dir));// 打开文件索引目录 Analyzer analyzer = new IKAnalyzer(); IndexWriterConfig indexWriterConfig = new IndexWriterConfig(analyzer); //创建索引写入对象 IndexWriter writer = new IndexWriter(directory,indexWriterConfig); Document doc = new Document(); doc.add(new LongPoint("id",blogArticle.getId())); doc.add(new TextField("title",blogArticle.getTitle(), Field.Store.YES)); doc.add(new TextField("marks",blogArticle.getMarks()==null?"":blogArticle.getMarks(),Field.Store.YES)); doc.add(new TextField("text",blogArticle.getText()==null?"":blogArticle.getText(),Field.Store.YES)); doc.add(new StoredField("href",blogArticle.getBlogChannel().getHref())); doc.add(new StoredField("show_pic",blogArticle.getShowPic()==null?"":blogArticle.getShowPic())); writer.updateDocument(new Term("id", blogArticle.getId().toString()), doc); writer.commit();// 提交 writer.close();// 关闭 }
Example #11
Source File: InternalEngine.java From crate with Apache License 2.0 | 6 votes |
@Override
protected void handleMergeException(final Directory dir, final Throwable exc) {
    // Hand the failure off to the generic thread pool instead of acting on the
    // merge thread we were called from (see the comment in doRun below).
    engineConfig.getThreadPool().generic().execute(new AbstractRunnable() {
        @Override
        public void onFailure(Exception e) {
            // The executor rejected the task (e.g. during shutdown); log and move on.
            logger.debug("merge failure action rejected", e);
        }

        @Override
        protected void doRun() throws Exception {
            /*
             * We do this on another thread rather than the merge thread that we are initially called on so that we have complete
             * confidence that the call stack does not contain catch statements that would cause the error that might be thrown
             * here from being caught and never reaching the uncaught exception handler.
             */
            failEngine("merge failed", new MergePolicy.MergeException(exc, dir));
        }
    });
}
Example #12
Source File: TestControlledRealTimeReopenThread.java From lucene-solr with Apache License 2.0 | 6 votes |
/** A SearcherFactory that returns a searcher over a different reader must be rejected. */
public void testEvilSearcherFactory() throws Exception {
  final Directory dir = newDirectory();
  final RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  w.commit();
  final IndexReader other = DirectoryReader.open(dir);
  // hands back a searcher built on "other" instead of the reader it was given
  final SearcherFactory theEvilOne = new SearcherFactory() {
    @Override
    public IndexSearcher newSearcher(IndexReader ignored, IndexReader previous) {
      return LuceneTestCase.newSearcher(other);
    }
  };
  expectThrows(IllegalStateException.class,
      () -> new SearcherManager(w.w, false, false, theEvilOne));
  w.close();
  other.close();
  dir.close();
}
Example #13
Source File: TestIndexWriterOnOldIndex.java From lucene-solr with Apache License 2.0 | 6 votes |
/** OpenMode.CREATE resets the index-created version; APPEND modes preserve it. */
public void testOpenModeAndCreatedVersion() throws IOException {
  assumeTrue("Reenable when 8.0 is released", false);
  InputStream resource = getClass().getResourceAsStream("index.single-empty-doc.8.0.0.zip");
  assertNotNull(resource);
  Path path = createTempDir();
  TestUtil.unzip(resource, path);
  Directory dir = newFSDirectory(path);
  for (OpenMode openMode : OpenMode.values()) {
    Directory tmpDir = newDirectory(dir);
    // the unzipped index was created by version 7.0.0
    assertEquals(7 /** 7.0.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
    IndexWriter w = new IndexWriter(tmpDir, newIndexWriterConfig().setOpenMode(openMode));
    w.commit();
    w.close();
    if (openMode == OpenMode.CREATE) {
      // CREATE wipes the index, so the created version becomes the current one
      assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
    } else {
      // APPEND / CREATE_OR_APPEND keep the original created version
      assertEquals(7 /** 7.0.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
    }
    tmpDir.close();
  }
  dir.close();
}
Example #14
Source File: BaseCompoundFormatTestCase.java From lucene-solr with Apache License 2.0 | 6 votes |
/** A compound-file reader is read-only: rename() must throw UnsupportedOperationException. */
public void testRenameFileDisabled() throws IOException {
  final String testfile = "_123.test";
  Directory dir = newDirectory();
  // write one small file, then wrap the directory contents into a CFS
  IndexOutput out = dir.createOutput(testfile, IOContext.DEFAULT);
  out.writeInt(3);
  out.close();
  SegmentInfo si = newSegmentInfo(dir, "_123");
  si.setFiles(Collections.emptyList());
  si.getCodec().compoundFormat().write(dir, si, IOContext.DEFAULT);
  Directory cfs = si.getCodec().compoundFormat().getCompoundReader(dir, si, IOContext.DEFAULT);
  expectThrows(UnsupportedOperationException.class, () -> cfs.rename(testfile, "bogus"));
  cfs.close();
  dir.close();
}
Example #15
Source File: HdfsDirectoryTest.java From lucene-solr with Apache License 2.0 | 6 votes |
public void testCreateTempFiles() throws IOException { String file1; try (Directory dir = new HdfsDirectory(directoryPath, directoryConf); IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) { out.writeByte((byte) 42); file1 = out.getName(); } assertTrue(file1.startsWith("foo_bar")); assertTrue(file1.endsWith(".tmp")); // Create the directory again to force the counter to be reset String file2; try (Directory dir = new HdfsDirectory(directoryPath, directoryConf); IndexOutput out = dir.createTempOutput("foo", "bar", IOContext.DEFAULT)) { out.writeByte((byte) 42); file2 = out.getName(); } assertTrue(file2.startsWith("foo_bar")); assertTrue(file2.endsWith(".tmp")); assertNotEquals(file1, file2); }
Example #16
Source File: TestIndexWriterMaxDocs.java From lucene-solr with Apache License 2.0 | 6 votes |
public void testUpdateDocument() throws Exception { setIndexWriterMaxDocs(10); try { Directory dir = newDirectory(); IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null)); for(int i=0;i<10;i++) { w.addDocument(new Document()); } // 11th document should fail: expectThrows(IllegalArgumentException.class, () -> { w.updateDocument(new Term("field", "foo"), new Document()); }); w.close(); dir.close(); } finally { restoreIndexWriterMaxDocs(); } }
Example #17
Source File: BaseGeoPointTestCase.java From lucene-solr with Apache License 2.0 | 6 votes |
/** test we can search for a polygon */ public void testPolygonBasics() throws Exception { assumeTrue("Impl does not support polygons", supportsPolygons()); Directory dir = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), dir); // add a doc with a point Document document = new Document(); addPointToDoc("field", document, 18.313694, -65.227444); writer.addDocument(document); // search and verify we found our doc IndexReader reader = writer.getReader(); IndexSearcher searcher = newSearcher(reader); assertEquals(1, searcher.count(newPolygonQuery("field", new Polygon( new double[] { 18, 18, 19, 19, 18 }, new double[] { -66, -65, -65, -66, -66 })))); reader.close(); writer.close(); dir.close(); }
Example #18
Source File: IntermediateForm.java From hadoop-gpu with Apache License 2.0 | 6 votes |
/**
 * This method is used by the index update combiner and process an
 * intermediate form into the current intermediate form. More specifically,
 * the input intermediate forms are a single-document ram index and/or a
 * single delete term.
 * @param form the input intermediate form
 * @throws IOException
 */
public void process(IntermediateForm form) throws IOException {
  // carry over any pending delete terms
  if (!form.deleteList.isEmpty()) {
    deleteList.addAll(form.deleteList);
  }
  // merge the single-document ram index, creating our writer lazily
  if (form.dir.sizeInBytes() > 0) {
    if (writer == null) {
      writer = createWriter();
    }
    writer.addIndexesNoOptimize(new Directory[] { form.dir });
    numDocs++;
  }
}
Example #19
Source File: TestHighFrequencyDictionary.java From lucene-solr with Apache License 2.0 | 5 votes |
/**
 * An empty committed index must yield a HighFrequencyDictionary whose entry
 * iterator is immediately exhausted.
 */
public void testEmpty() throws Exception {
  Directory dir = newDirectory();
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(new MockAnalyzer(random())));
  writer.commit();
  writer.close();
  IndexReader ir = DirectoryReader.open(dir);
  Dictionary dictionary = new HighFrequencyDictionary(ir, "bogus", 0.1f);
  BytesRefIterator tf = dictionary.getEntryIterator();
  assertNull(tf.next());
  // fix: the reader was previously leaked (only the directory was closed)
  ir.close();
  dir.close();
}
Example #20
Source File: SearchController.java From bookshop with MIT License | 5 votes |
/**
 * Builds an in-memory Lucene index over all books of type 1.
 *
 * @param analyzer the IK analyzer used to tokenize book fields
 * @return the populated in-memory index directory
 * @throws IOException if writing the index fails
 */
private Directory createIndex(IKAnalyzer analyzer) throws IOException {
    Directory index = new RAMDirectory();
    IndexWriterConfig config = new IndexWriterConfig(analyzer);
    // try-with-resources: previously a failing addDoc() leaked the writer and its lock
    try (IndexWriter writer = new IndexWriter(index, config)) {
        List<Book> books = bookService.listByBookType(1);
        for (Book book : books) {
            addDoc(writer, book);
        }
    }
    return index;
}
Example #21
Source File: TestLucene.java From bookshop with MIT License | 5 votes |
public static void main(String[] args) throws IOException, ParseException { // 1. 准备中文分词器 IKAnalyzer analyzer = new IKAnalyzer(); // 2. 索引 List<String> productNames = new ArrayList<>(); productNames.add("飞利浦led灯泡e27螺口暖白球泡灯家用照明超亮节能灯泡转色温灯泡"); productNames.add("飞利浦led灯泡e14螺口蜡烛灯泡3W尖泡拉尾节能灯泡暖黄光源Lamp"); productNames.add("雷士照明 LED灯泡 e27大螺口节能灯3W球泡灯 Lamp led节能灯泡"); productNames.add("飞利浦 led灯泡 e27螺口家用3w暖白球泡灯节能灯5W灯泡LED单灯7w"); productNames.add("飞利浦led小球泡e14螺口4.5w透明款led节能灯泡照明光源lamp单灯"); productNames.add("飞利浦蒲公英护眼台灯工作学习阅读节能灯具30508带光源"); productNames.add("欧普照明led灯泡蜡烛节能灯泡e14螺口球泡灯超亮照明单灯光源"); productNames.add("欧普照明led灯泡节能灯泡超亮光源e14e27螺旋螺口小球泡暖黄家用"); productNames.add("聚欧普照明led灯泡节能灯泡e27螺口球泡家用led照明单灯超亮光源"); Directory index = createIndex(analyzer, productNames); // 3. 查询器 String keyword = "护眼带光源"; Query query = new QueryParser("name",analyzer).parse(keyword); // 4. 搜索 IndexReader reader = DirectoryReader.open(index); IndexSearcher searcher = new IndexSearcher(reader); int numberPerPage = 1000; System.out.printf("当前一共有%d条数据%n",productNames.size()); System.out.printf("查询关键字是:\"%s\"%n",keyword); ScoreDoc[] hits = searcher.search(query,numberPerPage).scoreDocs; // 5. 显示查询结果 showSearchResults(searcher, hits, query, analyzer); }
Example #22
Source File: TestIndexWriter.java From lucene-solr with Apache License 2.0 | 5 votes |
/**
 * An empty field instance followed by a non-empty one must shift the second
 * field's term positions by the configured position-increment gap (100).
 */
public void testPositionIncrementGapEmptyField() throws Exception {
  Directory dir = newDirectory();
  MockAnalyzer analyzer = new MockAnalyzer(random());
  analyzer.setPositionIncrementGap(100);
  IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(analyzer));
  // term vectors with positions so we can inspect the indexed positions
  FieldType customType = new FieldType(TextField.TYPE_NOT_STORED);
  customType.setStoreTermVectors(true);
  customType.setStoreTermVectorPositions(true);
  Field emptyField = newField("field", "", customType);
  Field tokenField = newField("field", "crunch man", customType);
  Document doc = new Document();
  doc.add(emptyField);
  doc.add(tokenField);
  writer.addDocument(doc);
  writer.close();
  IndexReader reader = DirectoryReader.open(dir);
  Terms vector = reader.getTermVectors(0).terms("field");
  TermsEnum termsEnum = vector.iterator();
  // first term lands at position 100: the gap applies after the empty field
  assertNotNull(termsEnum.next());
  PostingsEnum postings = termsEnum.postings(null, PostingsEnum.ALL);
  assertNotNull(postings);
  assertTrue(postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  assertEquals(1, postings.freq());
  assertEquals(100, postings.nextPosition());
  // second term follows at position 101
  assertNotNull(termsEnum.next());
  postings = termsEnum.postings(postings, PostingsEnum.ALL);
  assertNotNull(postings);
  assertTrue(postings.nextDoc() != DocIdSetIterator.NO_MORE_DOCS);
  assertEquals(1, postings.freq());
  assertEquals(101, postings.nextPosition());
  assertNull(termsEnum.next());
  reader.close();
  dir.close();
}
Example #23
Source File: TestCollationDocValuesField.java From lucene-solr with Apache License 2.0 | 5 votes |
/** Sorting by a CollationDocValuesField orders "abc" before "ABC" under English collation. */
public void testBasic() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
  Field field = newField("field", "", StringField.TYPE_STORED);
  CollationDocValuesField collationField =
      new CollationDocValuesField("collated", Collator.getInstance(Locale.ENGLISH));
  // the same Document instance is reused; only the field values change
  Document doc = new Document();
  doc.add(field);
  doc.add(collationField);
  field.setStringValue("ABC");
  collationField.setStringValue("ABC");
  writer.addDocument(doc);
  field.setStringValue("abc");
  collationField.setStringValue("abc");
  writer.addDocument(doc);
  IndexReader reader = writer.getReader();
  writer.close();
  IndexSearcher searcher = newSearcher(reader);
  Sort sort = new Sort(new SortField("collated", SortField.Type.STRING));
  TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 5, sort);
  assertEquals("abc", reader.document(topDocs.scoreDocs[0].doc).get("field"));
  assertEquals("ABC", reader.document(topDocs.scoreDocs[1].doc).get("field"));
  reader.close();
  dir.close();
}
Example #24
Source File: NGramTestSetup.java From uyuni with GNU General Public License v2.0 | 5 votes |
/** Parses {@code query} against the "name" field and runs it over the given directory. */
public Hits performSearch(Directory dir, Analyzer alyz, String query) throws Exception {
    QueryParser parser = new QueryParser("name", alyz);
    Query parsed = parser.parse(query);
    IndexSearcher searcher = new IndexSearcher(dir);
    return searcher.search(parsed);
}
Example #25
Source File: TestTaxonomyFacetCounts.java From lucene-solr with Apache License 2.0 | 5 votes |
public void testCountRoot() throws Exception { // LUCENE-4882: FacetsAccumulator threw NPE if a FacetRequest was defined on CP.EMPTY Directory indexDir = newDirectory(); Directory taxoDir = newDirectory(); DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir); IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))); FacetsConfig config = new FacetsConfig(); for(int i = atLeast(30); i > 0; --i) { Document doc = new Document(); doc.add(new FacetField("a", "1")); doc.add(new FacetField("b", "1")); iw.addDocument(config.build(taxoWriter, doc)); } DirectoryReader r = DirectoryReader.open(iw); DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter); Facets facets = getAllFacets(FacetsConfig.DEFAULT_INDEX_FIELD_NAME, newSearcher(r), taxoReader, config); for (FacetResult result : facets.getAllDims(10)) { assertEquals(r.numDocs(), result.value.intValue()); } iw.close(); IOUtils.close(taxoWriter, taxoReader, taxoDir, r, indexDir); }
Example #26
Source File: TestPerFieldPostingsFormat2.java From lucene-solr with Apache License 2.0 | 5 votes |
private IndexWriter newWriter(Directory dir, IndexWriterConfig conf) throws IOException { LogDocMergePolicy logByteSizeMergePolicy = new LogDocMergePolicy(); logByteSizeMergePolicy.setNoCFSRatio(0.0); // make sure we use plain // files conf.setMergePolicy(logByteSizeMergePolicy); final IndexWriter writer = new IndexWriter(dir, conf); return writer; }
Example #27
Source File: SearchBuilder.java From taoshop with Apache License 2.0 | 5 votes |
public static void doSearch(String indexDir , String queryStr) throws IOException, ParseException, InvalidTokenOffsetsException { Directory directory = FSDirectory.open(Paths.get(indexDir)); DirectoryReader reader = DirectoryReader.open(directory); IndexSearcher searcher = new IndexSearcher(reader); Analyzer analyzer = new SmartChineseAnalyzer(); QueryParser parser = new QueryParser("tcontent",analyzer); Query query = parser.parse(queryStr); long startTime = System.currentTimeMillis(); TopDocs docs = searcher.search(query,10); System.out.println("查找"+queryStr+"所用时间:"+(System.currentTimeMillis()-startTime)); System.out.println("查询到"+docs.totalHits+"条记录"); //加入高亮显示的 SimpleHTMLFormatter simpleHTMLFormatter = new SimpleHTMLFormatter("<b><font color=red>","</font></b>"); QueryScorer scorer = new QueryScorer(query);//计算查询结果最高的得分 Fragmenter fragmenter = new SimpleSpanFragmenter(scorer);//根据得分算出一个片段 Highlighter highlighter = new Highlighter(simpleHTMLFormatter,scorer); highlighter.setTextFragmenter(fragmenter);//设置显示高亮的片段 //遍历查询结果 for(ScoreDoc scoreDoc : docs.scoreDocs){ Document doc = searcher.doc(scoreDoc.doc); System.out.println(doc.get("title")); String tcontent = doc.get("tcontent"); if(tcontent != null){ TokenStream tokenStream = analyzer.tokenStream("tcontent", new StringReader(tcontent)); String summary = highlighter.getBestFragment(tokenStream, tcontent); System.out.println(summary); } } reader.close(); }
Example #28
Source File: LuceneTestCase.java From lucene-solr with Apache License 2.0 | 5 votes |
/**
 * Returns a new Directory instance, using the specified random
 * with contents copied from the provided directory. See
 * {@link #newDirectory()} for more information.
 */
public static BaseDirectoryWrapper newDirectory(Random r, Directory d) throws IOException {
  Directory copy = newDirectoryImpl(r, TEST_DIRECTORY);
  for (String file : d.listAll()) {
    // copy only segments files and codec-named files; skip everything else
    boolean isSegmentsFile = file.startsWith(IndexFileNames.SEGMENTS);
    if (isSegmentsFile || IndexFileNames.CODEC_FILE_PATTERN.matcher(file).matches()) {
      copy.copyFrom(d, file, file, newIOContext(r));
    }
  }
  return wrapDirectory(r, copy, rarely(r), false);
}
Example #29
Source File: TestDocValues.java From lucene-solr with Apache License 2.0 | 5 votes |
/** * field just doesnt have any docvalues at all: exception */ public void testMisconfiguredField() throws Exception { Directory dir = newDirectory(); IndexWriter iw = new IndexWriter(dir, newIndexWriterConfig(null)); Document doc = new Document(); doc.add(new StringField("foo", "bar", Field.Store.NO)); iw.addDocument(doc); DirectoryReader dr = DirectoryReader.open(iw); LeafReader r = getOnlyLeafReader(dr); // errors expectThrows(IllegalStateException.class, () -> { DocValues.getBinary(r, "foo"); }); expectThrows(IllegalStateException.class, () -> { DocValues.getNumeric(r, "foo"); }); expectThrows(IllegalStateException.class, () -> { DocValues.getSorted(r, "foo"); }); expectThrows(IllegalStateException.class, () -> { DocValues.getSortedSet(r, "foo"); }); expectThrows(IllegalStateException.class, () -> { DocValues.getSortedNumeric(r, "foo"); }); dr.close(); iw.close(); dir.close(); }
Example #30
Source File: DirectoryReader.java From lucene-solr with Apache License 2.0 | 5 votes |
/** Returns the directory this index resides in. */
public final Directory directory() {
  // Don't ensureOpen here -- in certain cases, when a
  // cloned/reopened reader needs to commit, it may call
  // this method on the closed original reader
  return directory;
}