下面列出了 org.apache.lucene.index.IndexWriterConfig.OpenMode 的实例代码;可以点击链接到 GitHub 查看源代码,也可以在右侧发表评论。
@Override
public boolean delete(SearchDto searchDto) throws IOException {
    // Removes the document whose ID term matches the given DTO from the Lucene
    // index. Always returns true; failures are recorded in the error cache
    // instead of being propagated (best-effort contract preserved).
    IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
    conf.setOpenMode(OpenMode.CREATE_OR_APPEND);
    // try-with-resources guarantees the writer (and its index lock) is released
    // even when deleteDocuments throws; the previous manual finally block could
    // mask the original exception if close() itself failed.
    try (IndexWriter writer = new IndexWriter(
            FSDirectory.open(Paths.get(settingCache.get(ISetting.S_LUCENE_DIR).getValue())), conf)) {
        writer.deleteDocuments(new Term(ID, searchDto.getId()));
    } catch (Exception e) {
        e.printStackTrace();
        stringCache.add(IConst.C_CACHE_ERROR_TIP, "Lucene删除异常,请联系管理员查看日志,错误信息:" + e.getMessage());
    }
    return true;
}
public void prepareIndex() throws IOException {
    // Prepares the global index: ensures the directory exists, then opens the
    // shared IndexWriter and DocumentMaker used for subsequent indexing.
    File globalWFMDIr = new File(Util.GTPM_INDEX_DIR);
    if (!globalWFMDIr.exists()) {
        Util.createDirs(Util.GTPM_INDEX_DIR);
    }
    // KeywordAnalyzer indexes each field value as a single exact-match token.
    KeywordAnalyzer keywordAnalyzer = new KeywordAnalyzer();
    IndexWriterConfig wfmIndexWriterConfig = new IndexWriterConfig(Version.LUCENE_46, keywordAnalyzer);
    wfmIndexWriterConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);
    wfmIndexWriterConfig.setRAMBufferSizeMB(1024);
    logger.info("PREPARE INDEX");
    // Previously an IOException here was swallowed with printStackTrace(),
    // leaving wfmIndexWriter/wfmIndexer null and causing NPEs on first use.
    // The method already declares IOException, so let it propagate.
    wfmIndexWriter = new IndexWriter(FSDirectory.open(new File(Util.GTPM_INDEX_DIR)), wfmIndexWriterConfig);
    // An initial commit makes a freshly created index immediately readable.
    wfmIndexWriter.commit();
    wfmIndexer = new DocumentMaker(wfmIndexWriter);
}
public void testUpgradeWithNRTReader() throws Exception {
    // For each legacy index: open it for append, confirm an NRT reader works,
    // then merge, commit and roll back without corrupting the latest commit.
    for (String indexName : oldNames) {
        Directory legacyDir = newDirectory(oldIndexDirs.get(indexName));
        IndexWriter upgradeWriter = new IndexWriter(legacyDir,
                newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
        upgradeWriter.addDocument(new Document());
        // Near-real-time reader over the writer's uncommitted state.
        DirectoryReader nrtReader = DirectoryReader.open(upgradeWriter);
        upgradeWriter.commit();
        nrtReader.close();
        upgradeWriter.forceMerge(1);
        upgradeWriter.commit();
        upgradeWriter.rollback();
        // The last commit must still be readable after the rollback.
        SegmentInfos.readLatestCommit(legacyDir);
        legacyDir.close();
    }
}
@Override
public void initialize(UimaContext context)
        throws ResourceInitializationException {
    super.initialize(context);
    try {
        // create writer: open (or create) the on-disk index at INDEX_PATH.
        Directory dir = FSDirectory.open(new File(INDEX_PATH));
        Analyzer analyzer = getAnalyzer();
        IndexWriterConfig iwc = new IndexWriterConfig(
                Version.LUCENE_41, analyzer);
        // CREATE wipes any existing index at INDEX_PATH on every initialization.
        iwc.setOpenMode(OpenMode.CREATE);
        indexWriter = new IndexWriter(dir, iwc);
    } catch (IOException e) {
        // Fail initialization loudly instead of swallowing the error and
        // leaving indexWriter null (which would NPE on first use). The method
        // already declares ResourceInitializationException for exactly this.
        throw new ResourceInitializationException(e);
    }
}
@Test
public void testCommit() throws Exception {
    // Verifies that uncommitted category additions are not visible through the
    // underlying Directory: only the initial commit (the taxonomy root) is.
    Directory taxoDir = newDirectory();
    DirectoryTaxonomyWriter taxoWriter =
            new DirectoryTaxonomyWriter(taxoDir, OpenMode.CREATE_OR_APPEND, NO_OP_CACHE);
    assertFalse(DirectoryReader.indexExists(taxoDir));
    taxoWriter.commit(); // first commit, so that an index will be created
    taxoWriter.addCategory(new FacetLabel("a"));
    IndexReader taxoReader = DirectoryReader.open(taxoDir);
    assertEquals("No categories should have been committed to the underlying directory",
            1, taxoReader.numDocs());
    taxoReader.close();
    taxoWriter.close();
    taxoDir.close();
}
// Verifies that reopening an old-format index keeps its original
// index-created version for APPEND/CREATE_OR_APPEND, while CREATE resets it
// to the current version.
public void testOpenModeAndCreatedVersion() throws IOException {
// Disabled pending availability of the 8.0 back-compat index.
assumeTrue("Reenable when 8.0 is released", false);
// NOTE(review): the zip name says 8.0.0 but the assertions below expect a
// created-version major of 7 — confirm which is intended when re-enabling.
InputStream resource = getClass().getResourceAsStream("index.single-empty-doc.8.0.0.zip");
assertNotNull(resource);
Path path = createTempDir();
TestUtil.unzip(resource, path);
Directory dir = newFSDirectory(path);
for (OpenMode openMode : OpenMode.values()) {
Directory tmpDir = newDirectory(dir);
// Sanity check: the unzipped index reports its original created version.
assertEquals(7 /* 7.0.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
IndexWriter w = new IndexWriter(tmpDir, newIndexWriterConfig().setOpenMode(openMode));
w.commit();
w.close();
switch (openMode) {
case CREATE:
// CREATE discards the old index, so the created version becomes current.
assertEquals(Version.LATEST.major, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
break;
default:
// APPEND / CREATE_OR_APPEND preserve the original created version.
assertEquals(7 /* 7.0.0 */, SegmentInfos.readLatestCommit(tmpDir).getIndexCreatedVersionMajor());
}
tmpDir.close();
}
dir.close();
}
// Wires up the per-user wave-view index: an IndexWriter over the injected
// directory, an NRTManager for near-real-time search, and a background
// reopen thread refreshing searchers between MIN_STALE_SEC and MAX_STALE_SEC.
@Inject
public LucenePerUserWaveViewHandlerImpl(IndexDirectory directory,
ReadableWaveletDataProvider waveletProvider,
@Named(CoreSettingsNames.WAVE_SERVER_DOMAIN) String domain,
@IndexExecutor Executor executor) {
this.waveletProvider = waveletProvider;
this.executor = executor;
analyzer = new StandardAnalyzer(LUCENE_VERSION);
try {
IndexWriterConfig indexConfig = new IndexWriterConfig(LUCENE_VERSION, analyzer);
indexConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);
indexWriter = new IndexWriter(directory.getDirectory(), indexConfig);
// WaveSearchWarmer pre-warms new searchers for the given domain.
nrtManager = new NRTManager(indexWriter, new WaveSearchWarmer(domain));
} catch (IOException ex) {
// Wrap in the domain exception; construction must not half-succeed.
// NOTE(review): if NRTManager creation fails, indexWriter is not closed here.
throw new IndexException(ex);
}
// Started last so it only ever sees a fully constructed nrtManager.
nrtManagerReopenThread = new NRTManagerReopenThread(nrtManager, MAX_STALE_SEC, MIN_STALE_SEC);
nrtManagerReopenThread.start();
}
public void testStressLocks() throws Exception {
    Path indexPath = createTempDir();
    assumeFalse("cannot handle buggy Files.delete", TestUtil.hasWindowsFS(indexPath));
    Directory stressDir = getDirectory(indexPath);
    // First create a 1 doc index so the searcher thread has something to open.
    IndexWriter seedWriter = new IndexWriter(stressDir,
            new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
    addDoc(seedWriter);
    seedWriter.close();
    // Hammer the same directory with one concurrent writer and one searcher.
    int iterations = atLeast(20);
    WriterThread writerThread = new WriterThread(iterations, stressDir);
    SearcherThread searcherThread = new SearcherThread(iterations, stressDir);
    writerThread.start();
    searcherThread.start();
    writerThread.join();
    searcherThread.join();
    // Neither side may have tripped over the other's locking.
    assertFalse("IndexWriter hit unexpected exceptions", writerThread.hitException);
    assertFalse("IndexSearcher hit unexpected exceptions", searcherThread.hitException);
    stressDir.close();
}
/**
* {@link PersistentSnapshotDeletionPolicy} wraps another
* {@link IndexDeletionPolicy} to enable flexible snapshotting.
*
* @param primary
* the {@link IndexDeletionPolicy} that is used on non-snapshotted
* commits. Snapshotted commits, by definition, are not deleted until
* explicitly released via {@link #release}.
* @param dir
* the {@link Directory} which will be used to persist the snapshots
* information.
* @param mode
* specifies whether a new index should be created, deleting all
* existing snapshots information (immediately), or open an existing
* index, initializing the class with the snapshots information.
*/
public PersistentSnapshotDeletionPolicy(IndexDeletionPolicy primary,
Directory dir, OpenMode mode) throws IOException {
super(primary);
this.dir = dir;
if (mode == OpenMode.CREATE) {
clearPriorSnapshots();
}
loadPriorSnapshots();
if (mode == OpenMode.APPEND && nextWriteGen == 0) {
throw new IllegalStateException("no snapshots stored in this directory");
}
}
@Test
public void testMergeUnusedPerFieldCodec() throws IOException {
    // Index three batches under a per-field codec, then force-merge and
    // confirm the document count survives the merge.
    Directory mergeDir = newDirectory();
    IndexWriterConfig codecConf = newIndexWriterConfig(new MockAnalyzer(random()))
            .setOpenMode(OpenMode.CREATE)
            .setCodec(new MockCodec());
    IndexWriter codecWriter = newWriter(mergeDir, codecConf);
    addDocs(codecWriter, 10);
    codecWriter.commit();
    addDocs3(codecWriter, 10);
    codecWriter.commit();
    addDocs2(codecWriter, 10);
    codecWriter.commit();
    assertEquals(30, codecWriter.getDocStats().maxDoc);
    TestUtil.checkIndex(mergeDir);
    codecWriter.forceMerge(1);
    // Merging down to a single segment must not change the total doc count.
    assertEquals(30, codecWriter.getDocStats().maxDoc);
    codecWriter.close();
    mergeDir.close();
}
public void createRandomTerms(int nDocs, int nTerms, double power, Directory dir) throws Exception {
    // Build a skewed term-frequency table: earlier terms are made less frequent.
    int[] inverseFreq = new int[nTerms];
    Term[] termTable = new Term[nTerms];
    for (int t = 0; t < nTerms; t++) {
        int rank = (nTerms + 1) - t; // make first terms less frequent
        inverseFreq[t] = (int) Math.ceil(Math.pow(rank, power));
        termTable[t] = new Term("f", Character.toString((char) ('A' + t)));
    }
    IndexWriter termWriter = new IndexWriter(dir,
            newIndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.CREATE));
    for (int docNum = 0; docNum < nDocs; docNum++) {
        Document document = new Document();
        for (int t = 0; t < nTerms; t++) {
            // Term t appears in roughly 1/inverseFreq[t] of the documents.
            if (random().nextInt(inverseFreq[t]) == 0) {
                document.add(newStringField("f", termTable[t].text(), Field.Store.NO));
            }
        }
        termWriter.addDocument(document);
    }
    termWriter.forceMerge(1);
    termWriter.close();
}
@Test
public void testGetCommitData() throws Exception {
    // Verifies that live commit data survives close/reopen of the IndexWriter.
    Directory dir = newDirectory();
    IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig(null));
    // Plain map instead of the double-brace idiom, which creates an anonymous
    // HashMap subclass holding a hidden reference to the enclosing test.
    HashMap<String, String> commitData = new HashMap<>();
    commitData.put("key", "value");
    writer.setLiveCommitData(commitData.entrySet());
    assertEquals("value", getLiveCommitData(writer).get("key"));
    writer.close();
    // validate that it's also visible when opening a new IndexWriter
    writer = new IndexWriter(dir, newIndexWriterConfig(null)
            .setOpenMode(OpenMode.APPEND));
    assertEquals("value", getLiveCommitData(writer).get("key"));
    writer.close();
    dir.close();
}
public void changeIndexNoAdds(Random random, Directory dir) throws IOException {
    // make sure searching sees right # hits before the merge
    DirectoryReader preReader = DirectoryReader.open(dir);
    IndexSearcher preSearcher = newSearcher(preReader);
    ScoreDoc[] found = preSearcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
    assertEquals("wrong number of hits", 34, found.length);
    Document firstDoc = preSearcher.doc(found[0].doc);
    assertEquals("wrong first document", "0", firstDoc.get("id"));
    preReader.close();
    // fully merge
    IndexWriter mergeWriter = new IndexWriter(dir,
            newIndexWriterConfig(new MockAnalyzer(random)).setOpenMode(OpenMode.APPEND));
    mergeWriter.forceMerge(1);
    mergeWriter.close();
    // The merge must leave search results unchanged.
    DirectoryReader postReader = DirectoryReader.open(dir);
    IndexSearcher postSearcher = newSearcher(postReader);
    found = postSearcher.search(new TermQuery(new Term("content", "aaa")), 1000).scoreDocs;
    assertEquals("wrong number of hits", 34, found.length);
    doTestHits(found, 34, postSearcher.getIndexReader());
    postReader.close();
}
public void testIsCurrent() throws Exception {
    Directory indexDir = newDirectory();
    // Build a one-document index.
    IndexWriter w = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
    addDocumentWithFields(w);
    w.close();
    // set up reader: freshly opened, so it reflects the latest commit.
    DirectoryReader snapshot = DirectoryReader.open(indexDir);
    assertTrue(snapshot.isCurrent());
    // modify index by adding another document:
    w = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))
            .setOpenMode(OpenMode.APPEND));
    addDocumentWithFields(w);
    w.close();
    assertFalse(snapshot.isCurrent());
    // re-create index: the old reader is still stale against the new commit.
    w = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random()))
            .setOpenMode(OpenMode.CREATE));
    addDocumentWithFields(w);
    w.close();
    assertFalse(snapshot.isCurrent());
    snapshot.close();
    indexDir.close();
}
private void fillIndex(Random random, Directory dir, int start, int numDocs) throws IOException {
    // Tiny buffers plus an aggressive merge policy deliberately produce many
    // small segments for the callers' tests.
    IndexWriterConfig fillConfig = newIndexWriterConfig(new MockAnalyzer(random))
            .setOpenMode(OpenMode.CREATE)
            .setMaxBufferedDocs(2)
            .setMergePolicy(newLogMergePolicy(2));
    IndexWriter fillWriter = new IndexWriter(dir, fillConfig);
    // One stored "count" field per document, numbered start..start+numDocs-1.
    for (int docNum = start; docNum < start + numDocs; docNum++) {
        Document d = new Document();
        d.add(newStringField("count", "" + docNum, Field.Store.YES));
        fillWriter.addDocument(d);
    }
    fillWriter.close();
}
public static Indexer create(String indexDirPath) {
    // Factory: opens an FSDirectory at the given path and an IndexWriter that
    // always starts from a fresh index. Returns null (after cleanup) on failure.
    Directory indexDirectory = null;
    try {
        indexDirectory = FSDirectory.open(new File(indexDirPath).toPath());
        IndexWriterConfig config = new IndexWriterConfig();
        config.setOpenMode(OpenMode.CREATE); // wipe any existing index
        IndexWriter indexWriter = new IndexWriter(indexDirectory, config);
        return new Indexer(indexDirectory, indexWriter);
    } catch (IOException e) {
        LOGGER.error("Exception while trying to create index writer for entity checking. Returning null.", e);
        // Release the directory handle if it was opened before the failure.
        IOUtils.closeQuietly(indexDirectory);
        return null;
    }
}
public static void main(String[] args) throws Exception {
    // NLPIR tokenizer-backed analyzer, shared between indexing and query parsing.
    NLPIRTokenizerAnalyzer nta = new NLPIRTokenizerAnalyzer("", 1, "", "", false);
    // Index: write one document into index/.
    IndexWriterConfig inconf = new IndexWriterConfig(nta);
    inconf.setOpenMode(OpenMode.CREATE_OR_APPEND);
    IndexWriter index = new IndexWriter(FSDirectory.open(Paths.get("index/")), inconf);
    Document doc = new Document();
    doc.add(new TextField("contents",
            "特朗普表示,很高兴汉堡会晤后再次同习近平主席通话。我同习主席就重大问题保持沟通和协调、两国加强各层级和各领域交往十分重要。当前,美中关系发展态势良好,我相信可以发展得更好。我期待着对中国进行国事访问。",
            Field.Store.YES));
    index.addDocument(doc);
    index.flush();
    index.close();
    // Search
    String field = "contents";
    IndexReader reader = DirectoryReader.open(FSDirectory.open(Paths.get("index/")));
    try {
        IndexSearcher searcher = new IndexSearcher(reader);
        QueryParser parser = new QueryParser(field, nta);
        Query query = parser.parse("特朗普习近平");
        TopDocs top = searcher.search(query, 100);
        System.out.println("总条数:" + top.totalHits);
        ScoreDoc[] hits = top.scoreDocs;
        for (int i = 0; i < hits.length; i++) {
            System.out.println("doc=" + hits[i].doc + " score=" + hits[i].score);
            Document d = searcher.doc(hits[i].doc);
            System.out.println(d.get("contents"));
        }
    } finally {
        // Bug fix: the reader was never closed, leaking the directory's file handles.
        reader.close();
    }
}
/**
 * Creates an empty collection to get it up and running.
 *
 * @param _errorOnExists when true, fail if the directory already holds content
 * @throws IOException if the directory is non-empty (and errors are requested)
 *         or index/marker creation fails
 */
public synchronized void create( boolean _errorOnExists ) throws IOException {
    setDirectory();
    // More than the bare lock/bookkeeping files means something is already there.
    if ( directory.listAll().length > 2 ) {
        if ( _errorOnExists ) {
            throw new IOException( "directory not empty; possible collection already present" );
        } else {
            if ( DirectoryReader.indexExists( directory ) ) {
                return;
            }
            // otherwise an index doesn't exist so allow the creation code to execute
        }
    }
    // Create an empty, committed index, then release the writer immediately.
    IndexWriterConfig iwc = new IndexWriterConfig( AnalyzerFactory.get(language) );
    iwc.setOpenMode( OpenMode.CREATE );
    indexwriter = new IndexWriter(directory, iwc);
    indexwriter.commit();
    indexwriter.close();
    indexwriter = null;
    // throw an openbd.create file in there so we know when it was created
    created = System.currentTimeMillis();
    File touchFile = new File( collectionpath, "openbd.created" );
    // try-with-resources ensures the marker file's handle is released even if
    // close() throws (the original leaked the FileWriter in that case).
    try ( Writer fw = new FileWriter( touchFile ) ) {
        // empty marker file; only its existence and timestamp matter
    }
}
/**
 * Removes all terms from the auto complete index.
 * @throws IOException
 * @throws AlreadyClosedException if the Autocompleter is already closed
 */
public void clearIndex() throws IOException {
    synchronized (modifyCurrentIndexLock) {
        ensureOpen();
        final Directory autoCompleteDir = this.autoCompleteIndex;
        // Opening a writer in OpenMode.CREATE truncates the index; closing it
        // straight away leaves an empty, committed index behind.
        final IndexWriter truncatingWriter = new IndexWriter(autoCompleteDir,
                new IndexWriterConfig(Version.LUCENE_CURRENT,
                        new WhitespaceAnalyzer(Version.LUCENE_CURRENT))
                        .setOpenMode(OpenMode.CREATE));
        truncatingWriter.close();
        // Point searches at the now-empty index.
        swapSearcher(autoCompleteDir);
    }
}
protected void make1dmfIndex( Analyzer analyzer, String... values ) throws Exception {
    // Rebuilds the index with a single document whose F field stores full
    // term vectors (positions + offsets) for each supplied value, then
    // refreshes the shared reader.
    IndexWriter vectorWriter = new IndexWriter(dir,
            new IndexWriterConfig(analyzer).setOpenMode(OpenMode.CREATE));
    FieldType withVectors = new FieldType(TextField.TYPE_STORED);
    withVectors.setStoreTermVectors(true);
    withVectors.setStoreTermVectorOffsets(true);
    withVectors.setStoreTermVectorPositions(true);
    Document doc = new Document();
    for (String value : values) {
        doc.add(new Field(F, value, withVectors));
    }
    vectorWriter.addDocument(doc);
    vectorWriter.close();
    if (reader != null) {
        reader.close();
    }
    reader = DirectoryReader.open(dir);
}
public void testNoExtraFiles() throws IOException {
    Directory testDir = newDirectory();
    IndexWriter iw = new IndexWriter(testDir,
            newIndexWriterConfig(new MockAnalyzer(random())).setMaxBufferedDocs(2));
    // Repeatedly fill, close and reopen the writer; after every close the
    // directory must contain no unreferenced files.
    for (int iter = 0; iter < 7; iter++) {
        if (VERBOSE) {
            System.out.println("TEST: iter=" + iter);
        }
        for (int docNum = 0; docNum < 21; docNum++) {
            Document doc = new Document();
            doc.add(newTextField("content", "a b c", Field.Store.NO));
            iw.addDocument(doc);
        }
        iw.close();
        TestIndexWriter.assertNoUnreferencedFiles(testDir, "testNoExtraFiles");
        // Reopen
        iw = new IndexWriter(testDir,
                newIndexWriterConfig(new MockAnalyzer(random()))
                        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(2));
    }
    iw.close();
    testDir.close();
}
@Override
public int doLogic() throws IOException {
    // Create a fresh index writer (OpenMode.CREATE) and register it on the run data.
    PerfRunData runData = getRunData();
    Config config = runData.getConfig();
    IndexWriter freshWriter = configureWriter(config, runData, OpenMode.CREATE, null);
    runData.setIndexWriter(freshWriter);
    return 1;
}
@Override
public int doLogic() throws IOException {
    // Open the index for append, optionally at a specific commit point
    // resolved from the user-supplied commit data.
    PerfRunData runData = getRunData();
    Config config = runData.getConfig();
    final IndexCommit commitPoint = (commitUserData == null)
        ? null
        : OpenReaderTask.findIndexCommit(runData.getDirectory(), commitUserData);
    final IndexWriter appendWriter =
        CreateIndexTask.configureWriter(config, runData, OpenMode.APPEND, commitPoint);
    runData.setIndexWriter(appendWriter);
    return 1;
}
/**
 * Tests index creation logic via the benchmark algorithm runner.
 */
public void testIndexAndSearchTasks() throws Exception {
    // 1. alg definition (required in every "logic" test)
    String[] algLines = {
        "ResetSystemErase",
        "CreateIndex",
        "{ AddDoc } : 1000",
        "ForceMerge(1)",
        "CloseIndex",
        "OpenReader",
        "{ CountingSearchTest } : 200",
        "CloseReader",
        "[ CountingSearchTest > : 70",
        "[ CountingSearchTest > : 9",
    };
    // 2. we test this value later
    CountingSearchTestTask.numSearches = 0;
    // 3. execute the algorithm (required in every "logic" test)
    Benchmark benchmark = execBenchmark(algLines);
    // 4. test specific checks after the benchmark run completed.
    assertEquals("TestSearchTask was supposed to be called!", 279, CountingSearchTestTask.numSearches);
    assertTrue("Index does not exist?...!",
            DirectoryReader.indexExists(benchmark.getRunData().getDirectory()));
    // now we should be able to open the index for write.
    IndexWriterConfig appendConf = new IndexWriterConfig(new MockAnalyzer(random()))
            .setOpenMode(OpenMode.APPEND);
    IndexWriter appendWriter = new IndexWriter(benchmark.getRunData().getDirectory(), appendConf);
    appendWriter.close();
    IndexReader countReader = DirectoryReader.open(benchmark.getRunData().getDirectory());
    assertEquals("1000 docs were added to the index, this is what we expect to find!",
            1000, countReader.numDocs());
    countReader.close();
}
/**
 * Tests exhausting-doc-maker logic: with content.source.forever=false the
 * single-doc source yields exactly one document.
 */
public void testExhaustContentSource() throws Exception {
    // 1. alg definition (required in every "logic" test)
    String[] algLines = {
        "# ----- properties ",
        "content.source=org.apache.lucene.benchmark.byTask.feeds.SingleDocSource",
        "content.source.log.step=1",
        "doc.term.vector=false",
        "content.source.forever=false",
        "directory=ByteBuffersDirectory",
        "doc.stored=false",
        "doc.tokenized=false",
        "# ----- alg ",
        "CreateIndex",
        "{ AddDoc } : * ",
        "ForceMerge(1)",
        "CloseIndex",
        "OpenReader",
        "{ CountingSearchTest } : 100",
        "CloseReader",
        "[ CountingSearchTest > : 30",
        "[ CountingSearchTest > : 9",
    };
    // 2. we test this value later
    CountingSearchTestTask.numSearches = 0;
    // 3. execute the algorithm (required in every "logic" test)
    Benchmark benchmark = execBenchmark(algLines);
    // 4. test specific checks after the benchmark run completed.
    assertEquals("TestSearchTask was supposed to be called!", 139, CountingSearchTestTask.numSearches);
    assertTrue("Index does not exist?...!",
            DirectoryReader.indexExists(benchmark.getRunData().getDirectory()));
    // now we should be able to open the index for write.
    IndexWriter appendWriter = new IndexWriter(benchmark.getRunData().getDirectory(),
            new IndexWriterConfig(new MockAnalyzer(random())).setOpenMode(OpenMode.APPEND));
    appendWriter.close();
    IndexReader countReader = DirectoryReader.open(benchmark.getRunData().getDirectory());
    assertEquals("1 docs were added to the index, this is what we expect to find!",
            1, countReader.numDocs());
    countReader.close();
}
@Override
protected IndexWriterConfig createIndexWriterConfig(OpenMode openMode) {
    // Wrap the inherited deletion policy in a SnapshotDeletionPolicy so the
    // test can snapshot commits; remember it in the sdp field for later use.
    IndexWriterConfig config = super.createIndexWriterConfig(openMode);
    sdp = new SnapshotDeletionPolicy(config.getIndexDeletionPolicy());
    config.setIndexDeletionPolicy(sdp);
    return config;
}
@Test
public void testDefault() throws Exception {
    Directory mainIndexDir = newDirectory();
    Directory taxoIndexDir = newDirectory();
    // create and open an index writer
    RandomIndexWriter indexWriter = new RandomIndexWriter(random(), mainIndexDir,
            newIndexWriterConfig(new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false)));
    // create and open a taxonomy writer
    TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoIndexDir, OpenMode.CREATE);
    FacetsConfig facetsConfig = getConfig();
    seedIndex(taxoWriter, indexWriter, facetsConfig);
    IndexReader indexReader = indexWriter.getReader();
    // The taxonomy must be committed before a reader can be opened on it.
    taxoWriter.commit();
    // prepare index reader and taxonomy.
    TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoIndexDir);
    // prepare searcher to search against
    IndexSearcher facetSearcher = newSearcher(indexReader);
    FacetsCollector collector = performSearch(taxoReader, indexReader, facetSearcher);
    // Obtain facets results and hand-test them
    assertCorrectResults(getTaxonomyFacetCounts(taxoReader, facetsConfig, collector));
    assertOrdinalsExist("$facets", indexReader);
    indexWriter.close();
    IOUtils.close(taxoReader, indexReader, taxoWriter, mainIndexDir, taxoIndexDir);
}
/**
* A constructor.
*
* @param dir The directory where the snapshot meta-data is stored.
* @param mode CREATE If previous meta-data should be erased.
* APPEND If previous meta-data should be read and updated.
* CREATE_OR_APPEND Creates a new meta-data structure if one does not exist
* Updates the existing structure if one exists.
* @throws IOException in case of errors.
*/
public SolrSnapshotMetaDataManager(SolrCore solrCore, Directory dir, OpenMode mode) throws IOException {
this.solrCore = solrCore;
this.dir = dir;
if (mode == OpenMode.CREATE) {
deleteSnapshotMetadataFiles();
}
loadFromSnapshotMetadataFile();
if (mode == OpenMode.APPEND && nextWriteGen == 0) {
throw new IllegalStateException("no snapshots stored in this directory");
}
}
private static IndexWriter getWriter(Directory directory) throws IOException {
    // Writer with byte-size based merging, opening an existing index or
    // creating a new one as needed.
    IndexWriterConfig writerConfig = new IndexWriterConfig(new MockAnalyzer(random()));
    writerConfig.setMergePolicy(new LogByteSizeMergePolicy());
    writerConfig.setOpenMode(OpenMode.CREATE_OR_APPEND);
    return new IndexWriter(directory, writerConfig);
}
void addDocs(final Random random, Directory dir, final int ndocs, String field, final String val, final int maxTF, final float percentDocs) throws IOException {
    // Analyzer whose tokenizer repeats `val` with randomized term frequency
    // (bounded by maxTF), controlled by percentDocs and the shared Random.
    Analyzer repeatAnalyzer = new Analyzer() {
        @Override
        public TokenStreamComponents createComponents(String fieldName) {
            return new TokenStreamComponents(new RepeatingTokenizer(val, random, percentDocs, maxTF));
        }
    };
    // One reusable document; the analyzer supplies the per-doc variation.
    Document template = new Document();
    template.add(newStringField(field, val, Field.Store.NO));
    IndexWriterConfig bulkConfig = newIndexWriterConfig(repeatAnalyzer)
            .setOpenMode(OpenMode.CREATE)
            .setMaxBufferedDocs(100)
            .setMergePolicy(newLogMergePolicy(100));
    IndexWriter bulkWriter = new IndexWriter(dir, bulkConfig);
    for (int added = 0; added < ndocs; added++) {
        bulkWriter.addDocument(template);
    }
    bulkWriter.forceMerge(1);
    bulkWriter.close();
}