org.apache.lucene.util.NamedThreadFactory Source Code Examples

The examples below show real-world usages of org.apache.lucene.util.NamedThreadFactory collected from several open-source projects. Follow the project links to view the full source on GitHub.
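
Before the project examples, here is a minimal standalone sketch of what NamedThreadFactory is for (this demo is not taken from any project below; the pool size, name prefix, and task are illustration values): the factory gives every thread created by an executor a name derived from a fixed prefix, which makes thread dumps and log output much easier to attribute.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.apache.lucene.util.NamedThreadFactory;

public class NamedThreadFactoryDemo {
  public static void main(String[] args) throws InterruptedException {
    // Every thread this pool creates is named after the "demo-worker" prefix.
    ExecutorService pool =
        Executors.newFixedThreadPool(2, new NamedThreadFactory("demo-worker"));
    for (int i = 0; i < 4; i++) {
      pool.execute(() ->
          System.out.println("running on " + Thread.currentThread().getName()));
    }
    pool.shutdown();
    pool.awaitTermination(5, TimeUnit.SECONDS);
  }
}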

Example 1 (File: PartitionedWriteAheadEventStore.java)

public PartitionedWriteAheadEventStore(final RepositoryConfiguration repoConfig, final RecordWriterFactory recordWriterFactory,
    final RecordReaderFactory recordReaderFactory, final EventReporter eventReporter, final EventFileManager fileManager) {
    super(repoConfig, eventReporter);
    this.repoConfig = repoConfig;
    this.eventReporter = eventReporter;
    this.filesToCompress = new LinkedBlockingQueue<>(100);
    final AtomicLong idGenerator = new AtomicLong(0L);
    this.partitions = createPartitions(repoConfig, recordWriterFactory, recordReaderFactory, idGenerator);
    this.fileManager = fileManager;

    // Creates tasks to compress data on rollover
    if (repoConfig.isCompressOnRollover()) {
        compressionExecutor = Executors.newFixedThreadPool(repoConfig.getIndexThreadPoolSize(), new NamedThreadFactory("Compress Provenance Logs"));
    } else {
        compressionExecutor = null;
    }
}
 
Example 2 (Project: lucene-solr, File: TestLeakFS.java)
/** Test leaks via AsynchronousFileChannel.open */
public void testLeakAsyncFileChannel() throws IOException, InterruptedException {
  Path dir = wrap(createTempDir());
  
  OutputStream file = Files.newOutputStream(dir.resolve("stillopen"));
  file.write(5);
  file.close();

  ExecutorService executorService = Executors.newFixedThreadPool(1,
      new NamedThreadFactory("async-io"));
  try {
    AsynchronousFileChannel leak = AsynchronousFileChannel.open(dir.resolve("stillopen"),
        Collections.emptySet(), executorService);
    Exception e = expectThrows(Exception.class, () -> dir.getFileSystem().close());
    assertTrue(e.getMessage().contains("file handle leaks"));
    leak.close();
  } finally {
    executorService.shutdown();
    executorService.awaitTermination(5, TimeUnit.SECONDS);
  }
}
 
Example 3 (Project: lucene-solr, File: TestVerboseFS.java)
/** Test AsynchronousFileChannel.open */
public void testAsyncFileChannel() throws IOException, InterruptedException {
  InfoStreamListener stream = new InfoStreamListener("newAsynchronousFileChannel");
  Path dir = wrap(createTempDir(), stream);

  ExecutorService executorService = Executors.newFixedThreadPool(1,
      new NamedThreadFactory("async-io"));
  try {
    Set<StandardOpenOption> opts = Set
        .of(StandardOpenOption.CREATE_NEW, StandardOpenOption.READ,
            StandardOpenOption.WRITE);
    AsynchronousFileChannel channel = AsynchronousFileChannel
        .open(dir.resolve("foobar"), opts, executorService);
    assertTrue(stream.sawMessage());
    channel.close();

    expectThrows(IOException.class, () -> AsynchronousFileChannel.open(dir.resolve("foobar"),
        opts, executorService));

    expectThrows(NoSuchFileException.class,
        () -> AsynchronousFileChannel.open(dir.resolve("doesNotExist.rip")));
  } finally {
    executorService.shutdown();
    executorService.awaitTermination(5, TimeUnit.SECONDS);
  }
}
 
Example 4 (Project: lucene-solr, File: Monitor.java)
/**
 * Create a new Monitor instance
 *
 * @param analyzer      to analyze {@link Document}s at match time
 * @param presearcher   the presearcher to use
 * @param configuration the configuration
 */
public Monitor(Analyzer analyzer, Presearcher presearcher,
               MonitorConfiguration configuration) throws IOException {

  this.analyzer = analyzer;
  this.presearcher = presearcher;
  this.queryIndex = new QueryIndex(configuration, presearcher);

  long purgeFrequency = configuration.getPurgeFrequency();
  this.purgeExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("cache-purge"));
  this.purgeExecutor.scheduleAtFixedRate(() -> {
    try {
      purgeCache();
    } catch (Throwable e) {
      listeners.forEach(l -> l.onPurgeError(e));
    }
  }, purgeFrequency, purgeFrequency, configuration.getPurgeFrequencyUnits());

  this.commitBatchSize = configuration.getQueryUpdateBufferSize();
}
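
The constructor above starts a background "cache-purge" thread that runs for the lifetime of the Monitor. Below is a hedged end-to-end sketch assembled from the Monitor calls used by the test examples on this page (classes from org.apache.lucene.monitor; the field name and query text are illustration values):

// Register one query, then match a document against it.
try (Monitor monitor = new Monitor(new StandardAnalyzer())) {
  monitor.register(Collections.singletonList(
      new MonitorQuery("1", new TermQuery(new Term("field", "test")))));

  Document doc = new Document();
  doc.add(new TextField("field", "a test document", Field.Store.NO));

  // SIMPLE_MATCHER reports which registered queries match this document
  MatchingQueries<QueryMatch> matches = monitor.match(doc, QueryMatch.SIMPLE_MATCHER);
  System.out.println(matches.getMatchCount()); // expected: 1
}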
 
Example 5 (Project: lucene-solr, File: ConcurrentMatcherTestBase.java)
public void testAllMatchesAreCollected() throws Exception {

    ExecutorService executor = Executors.newFixedThreadPool(10, new NamedThreadFactory("matchers"));
    try (Monitor monitor = new Monitor(ANALYZER)) {
      List<MonitorQuery> queries = new ArrayList<>();
      for (int i = 0; i < 1000; i++) {
        queries.add(new MonitorQuery(Integer.toString(i), MonitorTestBase.parse("+test " + i)));
      }
      monitor.register(queries);

      Document doc = new Document();
      doc.add(newTextField("field", "test", Field.Store.NO));

      MatchingQueries<QueryMatch> matches
          = monitor.match(doc, matcherFactory(executor, QueryMatch.SIMPLE_MATCHER, 10));

      assertEquals(1000, matches.getMatchCount());
    }
    finally {
      executor.shutdown();
    }
  }
 
Example 6 (Project: lucene-solr, File: TestIndexSearcher.java)
public void testGetSlices() throws Exception {
  assertNull(new IndexSearcher(new MultiReader()).getSlices());

  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  w.addDocument(new Document());
  IndexReader r = w.getReader();
  w.close();

  ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
                                 new LinkedBlockingQueue<Runnable>(),
                                 new NamedThreadFactory("TestIndexSearcher"));
  IndexSearcher s = new IndexSearcher(r, service);
  IndexSearcher.LeafSlice[] slices = s.getSlices();
  assertNotNull(slices);
  assertEquals(1, slices.length);
  assertEquals(1, slices[0].leaves.length);
  assertTrue(slices[0].leaves[0] == r.leaves().get(0));
  service.shutdown();
  IOUtils.close(r, dir);
}
 
Example 7 (Project: lucene-solr, File: TestTopFieldCollector.java)
private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold, Query q, Sort sort, IndexReader indexReader) throws IOException {
  ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
      new LinkedBlockingQueue<Runnable>(),
      new NamedThreadFactory("TestTopDocsCollector"));
  try {
    IndexSearcher searcher = new IndexSearcher(indexReader, service);

    CollectorManager<TopFieldCollector,TopFieldDocs> collectorManager = TopFieldCollector.createSharedManager(sort, numResults,
        null, threshold);

    TopDocs tdc = searcher.search(q, collectorManager);

    return tdc;
  } finally {
    service.shutdown();
  }
}
 
Example 8 (Project: nifi, File: PartitionedWriteAheadEventStore.java)
public PartitionedWriteAheadEventStore(final RepositoryConfiguration repoConfig, final RecordWriterFactory recordWriterFactory,
    final RecordReaderFactory recordReaderFactory, final EventReporter eventReporter, final EventFileManager fileManager) {
    super(repoConfig, eventReporter);
    this.repoConfig = repoConfig;
    this.eventReporter = eventReporter;
    this.filesToCompress = new LinkedBlockingQueue<>(100);
    final AtomicLong idGenerator = new AtomicLong(0L);
    this.fileManager = fileManager;
    this.partitions = createPartitions(repoConfig, recordWriterFactory, recordReaderFactory, idGenerator);

    // Creates tasks to compress data on rollover
    if (repoConfig.isCompressOnRollover()) {
        compressionExecutor = Executors.newFixedThreadPool(repoConfig.getIndexThreadPoolSize(), new NamedThreadFactory("Compress Provenance Logs"));
    } else {
        compressionExecutor = null;
    }
}
 
Example 9 (Project: localization_nifi, File: PartitionedEventStore.java)
@Override
public void initialize() throws IOException {
    maintenanceExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("Provenance Repository Maintenance"));
    maintenanceExecutor.scheduleWithFixedDelay(() -> performMaintenance(), 1, 1, TimeUnit.MINUTES);

    for (final EventStorePartition partition : getPartitions()) {
        partition.initialize();
    }
}
 
Example 10 (Project: lucene-solr, File: AnalysisPanelProvider.java)
public AnalysisPanelProvider() throws IOException {
  this.preset = new PresetAnalyzerPanelProvider().get();
  this.custom = new CustomAnalyzerPanelProvider().get();

  this.operatorRegistry = ComponentOperatorRegistry.getInstance();
  this.analysisChainDialogFactory = AnalysisChainDialogFactory.getInstance();
  this.tokenAttrDialogFactory = TokenAttributeDialogFactory.getInstance();
  this.messageBroker = MessageBroker.getInstance();

  this.analysisModel = new AnalysisFactory().newInstance();
  analysisModel.createAnalyzerFromClassName(StandardAnalyzer.class.getName());

  this.simpleResult = new SimpleAnalyzeResultPanelProvider(tokenAttrDialogFactory).get();
  this.stepByStepResult = new StepByStepAnalyzeResultPanelProvider(tokenAttrDialogFactory).get();

  operatorRegistry.register(AnalysisTabOperator.class, this);

  operatorRegistry.get(PresetAnalyzerPanelOperator.class).ifPresent(operator -> {
    // Scanning all Analyzer types will take time...
    ExecutorService executorService =
        Executors.newFixedThreadPool(1, new NamedThreadFactory("load-preset-analyzer-types"));
    executorService.execute(() -> {
      operator.setPresetAnalyzers(analysisModel.getPresetAnalyzerTypes());
      operator.setSelectedAnalyzer(analysisModel.currentAnalyzer().getClass());
    });
    executorService.shutdown();
  });
}
 
Example 11 (Project: lucene-solr, File: TestSortedSetDocValuesFacets.java)
private ExecutorService randomExecutorServiceOrNull() {
  if (random().nextBoolean()) {
    return null;
  } else {
    return new ThreadPoolExecutor(1, TestUtil.nextInt(random(), 2, 6), Long.MAX_VALUE, TimeUnit.MILLISECONDS,
                                  new LinkedBlockingQueue<Runnable>(),
                                  new NamedThreadFactory("TestIndexSearcher"));
  }
}
 
Example 12 (Project: lucene-solr, File: ConcurrentQueryLoader.java)
/**
 * Create a new ConcurrentQueryLoader
 *
 * @param monitor   the Monitor to load queries to
 * @param threads   the number of threads to use
 * @param queueSize the size of the buffer to hold queries in
 */
public ConcurrentQueryLoader(Monitor monitor, int threads, int queueSize) {
  this.monitor = monitor;
  this.queue = new LinkedBlockingQueue<>(queueSize);
  this.executor = Executors.newFixedThreadPool(threads, new NamedThreadFactory("loader"));
  this.shutdownLatch = new CountDownLatch(threads);
  for (int i = 0; i < threads; i++) {
    this.executor.submit(new Worker(queueSize / threads));
  }
}
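
A hedged usage sketch for the loader above, with monitor assumed already constructed and MonitorTestBase.parse being the test helper seen in Example 5. That the loader is Closeable and exposes add(MonitorQuery) is an assumption based on the lucene-monitor module (the constructor submits Worker tasks, so close() plausibly drains the queue and joins them); the thread and queue counts are illustration values:

// Assumed API: add(...) hands a query to one of the "loader" worker threads;
// close() waits until every buffered query has been registered with the Monitor.
try (ConcurrentQueryLoader loader = new ConcurrentQueryLoader(monitor, 4, 400)) {
  for (int i = 0; i < 10000; i++) {
    loader.add(new MonitorQuery(Integer.toString(i), MonitorTestBase.parse("test " + i)));
  }
}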
 
Example 13 (Project: lucene-solr, File: TestIndexSearcher.java)
public void testQueueSizeBasedSliceExecutor() throws Exception {
  ThreadPoolExecutor service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
      new LinkedBlockingQueue<Runnable>(),
      new NamedThreadFactory("TestIndexSearcher"));

  runSliceExecutorTest(service, false);

  TestUtil.shutdownExecutorService(service);
}
 
Example 14 (Project: lucene-solr, File: TestIndexSearcher.java)
public void testRandomBlockingSliceExecutor() throws Exception {
  ThreadPoolExecutor service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
      new LinkedBlockingQueue<Runnable>(),
      new NamedThreadFactory("TestIndexSearcher"));

  runSliceExecutorTest(service, true);

  TestUtil.shutdownExecutorService(service);
}
 
Example 15 (Project: lucene-solr, File: TestTopDocsCollector.java)
private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold, Query q, IndexReader indexReader) throws IOException {
  ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
      new LinkedBlockingQueue<Runnable>(),
      new NamedThreadFactory("TestTopDocsCollector"));
  try {
    IndexSearcher searcher = new IndexSearcher(indexReader, service);

    CollectorManager<TopScoreDocCollector,TopDocs> collectorManager = TopScoreDocCollector.createSharedManager(numResults,
        null, threshold);

    return searcher.search(q, collectorManager);
  } finally {
    service.shutdown();
  }
}
 
Example 16 (Project: lucene-solr, File: TestTopFieldCollector.java)
public void testSharedHitcountCollector() throws Exception {

    ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
        new LinkedBlockingQueue<Runnable>(),
        new NamedThreadFactory("TestTopFieldCollector"));

    IndexSearcher concurrentSearcher = new IndexSearcher(ir, service);

    // Two Sort criteria to instantiate the multi/single comparators.
    Sort[] sort = new Sort[] {new Sort(SortField.FIELD_DOC), new Sort() };
    for(int i = 0; i < sort.length; i++) {
      Query q = new MatchAllDocsQuery();
      TopDocsCollector<Entry> tdc = TopFieldCollector.create(sort[i], 10, Integer.MAX_VALUE);

      is.search(q, tdc);

      CollectorManager<TopFieldCollector,TopFieldDocs> tsdc = TopFieldCollector.createSharedManager(sort[i], 10, null, Integer.MAX_VALUE);

      TopDocs td = tdc.topDocs();
      TopDocs td2 = concurrentSearcher.search(q, tsdc);
      ScoreDoc[] sd = td.scoreDocs;
      for(int j = 0; j < sd.length; j++) {
        assertTrue(Float.isNaN(sd[j].score));
      }

      CheckHits.checkEqual(q, td.scoreDocs, td2.scoreDocs);
    }

    service.shutdown();
  }
 
Example 17 (Project: lucene-solr, File: TestSegmentToThreadMapping.java)
public void testIntraSliceDocIDOrder() throws Exception {
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  w.addDocument(new Document());
  w.addDocument(new Document());
  w.commit();
  w.addDocument(new Document());
  w.addDocument(new Document());
  w.commit();
  IndexReader r = w.getReader();
  w.close();

  ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
      new LinkedBlockingQueue<Runnable>(),
      new NamedThreadFactory("TestSegmentToThreadMapping"));
  IndexSearcher s = new IndexSearcher(r, service);
  Query query = new MatchAllDocsQuery();

  s.search(query, Integer.MAX_VALUE);

  IndexSearcher.LeafSlice[] slices = s.getSlices();
  assertNotNull(slices);

  for (IndexSearcher.LeafSlice leafSlice : slices) {
    LeafReaderContext[] leafReaderContexts = leafSlice.leaves;
    int previousDocBase = leafReaderContexts[0].docBase;

    for (LeafReaderContext leafReaderContext : leafReaderContexts) {
      assertTrue(previousDocBase <= leafReaderContext.docBase);
      previousDocBase = leafReaderContext.docBase;
    }
  }

  service.shutdown();
  IOUtils.close(r, dir);
}
 
Example 18 (Project: BioSolr, File: ElasticOntologyHelperFactory.java)
public OntologyHelper buildOntologyHelper() throws OntologyHelperException {
	return new OntologyHelperBuilder()
			.ontologyUri(settings.getOntologyUri())
			.labelPropertyUris(convertListToArray(settings.getLabelPropertyUris()))
			.synonymPropertyUris(convertListToArray(settings.getSynonymPropertyUris()))
			.definitionPropertyUris(convertListToArray(settings.getDefinitionPropertyUris()))
			.olsBaseUrl(settings.getOlsBaseUrl())
			.ontology(settings.getOlsOntology())
			.threadpoolSize(settings.getThreadpoolSize())
			.pageSize(settings.getPageSize())
			.threadFactory(new NamedThreadFactory("olsOntologyHelper"))
			.build();
}
 
Example 19 (Project: nifi, File: PartitionedEventStore.java)
@Override
public void initialize() throws IOException {
    maintenanceExecutor = Executors.newScheduledThreadPool(1, new NamedThreadFactory("Provenance Repository Maintenance"));
    final long maintenanceMillis = repoConfig.getMaintenanceFrequency(TimeUnit.MILLISECONDS);
    maintenanceExecutor.scheduleWithFixedDelay(this::performMaintenance, maintenanceMillis, maintenanceMillis, TimeUnit.MILLISECONDS);

    for (final EventStorePartition partition : getPartitions()) {
        partition.initialize();
    }
}
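
Note that, compared with the localization_nifi version in Example 9, this revision derives the maintenance interval from the repository configuration instead of hard-coding a one-minute delay.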
 
Example 20 (Project: lucene-solr, File: OpenIndexDialogFactory.java)
private void initialize() {
  idxPathCombo.setPreferredSize(new Dimension(360, 40));

  browseBtn.setText(FontUtils.elegantIconHtml("&#x6e;", MessageUtils.getLocalizedMessage("button.browse")));
  browseBtn.setFont(StyleConstants.FONT_BUTTON_LARGE);
  browseBtn.setPreferredSize(new Dimension(120, 40));
  browseBtn.addActionListener(listeners::browseDirectory);

  readOnlyCB.setText(MessageUtils.getLocalizedMessage("openindex.checkbox.readonly"));
  readOnlyCB.setSelected(prefs.isReadOnly());
  readOnlyCB.addActionListener(listeners::toggleReadOnly);
  readOnlyCB.setOpaque(false);

  // Scanning all Directory types will take time...
  ExecutorService executorService = Executors.newFixedThreadPool(1, new NamedThreadFactory("load-directory-types"));
  executorService.execute(() -> {
    for (String clazzName : supportedDirImpls()) {
      dirImplCombo.addItem(clazzName);
    }
  });
  executorService.shutdown();
  dirImplCombo.setPreferredSize(new Dimension(350, 30));
  dirImplCombo.setSelectedItem(prefs.getDirImpl());

  noReaderCB.setText(MessageUtils.getLocalizedMessage("openindex.checkbox.no_reader"));
  noReaderCB.setSelected(prefs.isNoReader());
  noReaderCB.setOpaque(false);

  useCompoundCB.setText(MessageUtils.getLocalizedMessage("openindex.checkbox.use_compound"));
  useCompoundCB.setSelected(prefs.isUseCompound());
  useCompoundCB.setOpaque(false);

  keepLastCommitRB.setText(MessageUtils.getLocalizedMessage("openindex.radio.keep_only_last_commit"));
  keepLastCommitRB.setSelected(!prefs.isKeepAllCommits());
  keepLastCommitRB.setOpaque(false);

  keepAllCommitsRB.setText(MessageUtils.getLocalizedMessage("openindex.radio.keep_all_commits"));
  keepAllCommitsRB.setSelected(prefs.isKeepAllCommits());
  keepAllCommitsRB.setOpaque(false);

}
 
Example 21 (Project: lucene-solr, File: TestSpellChecker.java)
public void testConcurrentAccess() throws IOException, InterruptedException {
  assertEquals(1, searchers.size());
  final IndexReader r = DirectoryReader.open(userindex);
  spellChecker.clearIndex();
  assertEquals(2, searchers.size());
  addwords(r, spellChecker, "field1");
  assertEquals(3, searchers.size());
  int num_field1 = this.numdoc();
  addwords(r, spellChecker, "field2");
  assertEquals(4, searchers.size());
  int num_field2 = this.numdoc();
  assertEquals(num_field2, num_field1 + 1);
  int numThreads = 5 + random().nextInt(5);
  ExecutorService executor = Executors.newFixedThreadPool(numThreads, new NamedThreadFactory("testConcurrentAccess"));
  SpellCheckWorker[] workers = new SpellCheckWorker[numThreads];
  for (int i = 0; i < numThreads; i++) {
    SpellCheckWorker spellCheckWorker = new SpellCheckWorker(r);
    executor.execute(spellCheckWorker);
    workers[i] = spellCheckWorker;
    
  }
  int iterations = 5 + random().nextInt(5);
  for (int i = 0; i < iterations; i++) {
    Thread.sleep(100);
    // concurrently reset the spell index
    spellChecker.setSpellIndex(this.spellindex);
    // for debug - prints the internal open searchers 
    // showSearchersOpen();
  }
  
  spellChecker.close();
  executor.shutdown();
  // wait for 60 seconds - usually this is very fast but coverage runs could take quite long
  executor.awaitTermination(60L, TimeUnit.SECONDS);
  
  for (int i = 0; i < workers.length; i++) {
    assertFalse(String.format(Locale.ROOT, "worker thread %d failed", i), workers[i].failed);
    assertTrue(String.format(Locale.ROOT, "worker thread %d is still running but should be terminated", i), workers[i].terminated);
  }
  // 4 searchers more than iterations
  // 1. at creation
  // 2. clearIndex()
  // 3. and 4. during addwords
  assertEquals(iterations + 4, searchers.size());
  assertSearchersClosed();
  r.close();
}
 
Example 22 (Project: lucene-solr, File: TestParallelDrillSideways.java)
@BeforeClass
public static void prepareExecutor() {
  executor = Executors.newCachedThreadPool(new NamedThreadFactory("TestParallelDrillSideways"));
}
 
Example 23 (Project: lucene-solr, File: ConfusionMatrixGenerator.java)
/**
 * get the {@link org.apache.lucene.classification.utils.ConfusionMatrixGenerator.ConfusionMatrix} of a given {@link Classifier},
 * generated on the given {@link IndexReader}, class and text fields.
 *
 * @param reader              the {@link IndexReader} containing the index used for creating the {@link Classifier}
 * @param classifier          the {@link Classifier} whose confusion matrix has to be generated
 * @param classFieldName      the name of the Lucene field used as the classifier's output
 * @param textFieldName       the name of the Lucene field used as the classifier's input
 * @param timeoutMilliseconds timeout to wait before stopping creating the confusion matrix
 * @param <T>                 the return type of the {@link ClassificationResult} returned by the given {@link Classifier}
 * @return a {@link org.apache.lucene.classification.utils.ConfusionMatrixGenerator.ConfusionMatrix}
 * @throws IOException if problems occur while reading the index or using the classifier
 */
public static <T> ConfusionMatrix getConfusionMatrix(IndexReader reader, Classifier<T> classifier, String classFieldName,
                                                     String textFieldName, long timeoutMilliseconds) throws IOException {

  ExecutorService executorService = Executors.newFixedThreadPool(1, new NamedThreadFactory("confusion-matrix-gen-"));

  try {

    Map<String, Map<String, Long>> counts = new HashMap<>();
    IndexSearcher indexSearcher = new IndexSearcher(reader);
    TopDocs topDocs = indexSearcher.search(new TermRangeQuery(classFieldName, null, null, true, true), Integer.MAX_VALUE);
    double time = 0d;

    int counter = 0;
    for (ScoreDoc scoreDoc : topDocs.scoreDocs) {

      if (timeoutMilliseconds > 0 && time >= timeoutMilliseconds) {
        break;
      }

      Document doc = reader.document(scoreDoc.doc);
      String[] correctAnswers = doc.getValues(classFieldName);

      if (correctAnswers != null && correctAnswers.length > 0) {
        Arrays.sort(correctAnswers);
        ClassificationResult<T> result;
        String text = doc.get(textFieldName);
        if (text != null) {
          try {
            // fail if classification takes more than 5s
            long start = System.currentTimeMillis();
            result = executorService.submit(() -> classifier.assignClass(text)).get(5, TimeUnit.SECONDS);
            long end = System.currentTimeMillis();
            time += end - start;

            if (result != null) {
              T assignedClass = result.getAssignedClass();
              if (assignedClass != null) {
                counter++;
                String classified = assignedClass instanceof BytesRef ? ((BytesRef) assignedClass).utf8ToString() : assignedClass.toString();

                String correctAnswer;
                if (Arrays.binarySearch(correctAnswers, classified) >= 0) {
                  correctAnswer = classified;
                } else {
                  correctAnswer = correctAnswers[0];
                }

                Map<String, Long> stringLongMap = counts.get(correctAnswer);
                if (stringLongMap != null) {
                  Long aLong = stringLongMap.get(classified);
                  if (aLong != null) {
                    stringLongMap.put(classified, aLong + 1);
                  } else {
                    stringLongMap.put(classified, 1L);
                  }
                } else {
                  stringLongMap = new HashMap<>();
                  stringLongMap.put(classified, 1L);
                  counts.put(correctAnswer, stringLongMap);
                }

              }
            }
          } catch (TimeoutException timeoutException) {
            // add classification timeout
            time += 5000;
          } catch (ExecutionException | InterruptedException executionException) {
            throw new RuntimeException(executionException);
          }

        }
      }
    }
    return new ConfusionMatrix(counts, time / counter, counter);
  } finally {
    executorService.shutdown();
  }
}
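
A hedged call-site sketch for the generator above. The timeout check in the loop only fires when timeoutMilliseconds > 0, so passing -1 disables the time limit. The SimpleNaiveBayesClassifier constructor and the getAccuracy() accessor are assumptions based on the Lucene classification module; the field names are illustration values:

// Assumption: SimpleNaiveBayesClassifier(reader, analyzer, query, classField, textField)
Classifier<BytesRef> classifier =
    new SimpleNaiveBayesClassifier(reader, new StandardAnalyzer(), null, "category", "body");
ConfusionMatrixGenerator.ConfusionMatrix matrix =
    ConfusionMatrixGenerator.getConfusionMatrix(reader, classifier, "category", "body", -1);
System.out.println("accuracy: " + matrix.getAccuracy()); // assumed accessor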
 
Example 24 (Project: lucene-solr, File: TestCachePurging.java)
private static void doConcurrentPurgesAndUpdatesTest() throws Exception {

    final CountDownLatch startUpdating = new CountDownLatch(1);
    final CountDownLatch finishUpdating = new CountDownLatch(1);

    try (final Monitor monitor = new Monitor(ANALYZER)) {
      Runnable updaterThread = () -> {
        try {
          startUpdating.await();
          for (int i = 200; i < 400; i++) {
            monitor.register(newMonitorQuery(i));
          }
          finishUpdating.countDown();
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
      };

      ExecutorService executor = Executors.newFixedThreadPool(1, new NamedThreadFactory("updaters"));
      try {
        executor.submit(updaterThread);

        for (int i = 0; i < 200; i++) {
          monitor.register(newMonitorQuery(i));
        }
        for (int i = 20; i < 80; i++) {
          monitor.deleteById(Integer.toString(i));
        }

        assertEquals(200, monitor.getQueryCacheStats().cachedQueries);

        startUpdating.countDown();
        monitor.purgeCache();
        finishUpdating.await();

        assertEquals(340, monitor.getQueryCacheStats().cachedQueries);
        Document doc = new Document();
        doc.add(newTextField("field", "test", Field.Store.NO));
        MatchingQueries<QueryMatch> matcher = monitor.match(doc, QueryMatch.SIMPLE_MATCHER);
        assertEquals(0, matcher.getErrors().size());
        assertEquals(340, matcher.getMatchCount());
      } finally {
        executor.shutdownNow();
      }
    }
  }
 
Example 25 (Project: lucene-solr, File: TestIndexSearcher.java)
public void testHugeN() throws Exception {
  ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
                                 new LinkedBlockingQueue<Runnable>(),
                                 new NamedThreadFactory("TestIndexSearcher"));

  IndexSearcher searchers[] = new IndexSearcher[] {
      new IndexSearcher(reader),
      new IndexSearcher(reader, service)
  };
  Query queries[] = new Query[] {
      new MatchAllDocsQuery(),
      new TermQuery(new Term("field", "1"))
  };
  Sort sorts[] = new Sort[] {
      null,
      new Sort(new SortField("field2", SortField.Type.STRING))
  };
  ScoreDoc afters[] = new ScoreDoc[] {
      null,
      new FieldDoc(0, 0f, new Object[] { new BytesRef("boo!") })
  };

  for (IndexSearcher searcher : searchers) {
    for (ScoreDoc after : afters) {
      for (Query query : queries) {
        for (Sort sort : sorts) {
          searcher.search(query, Integer.MAX_VALUE);
          searcher.searchAfter(after, query, Integer.MAX_VALUE);
          if (sort != null) {
            searcher.search(query, Integer.MAX_VALUE, sort);
            searcher.search(query, Integer.MAX_VALUE, sort, true);
            searcher.search(query, Integer.MAX_VALUE, sort, false);
            searcher.searchAfter(after, query, Integer.MAX_VALUE, sort);
            searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, true);
            searcher.searchAfter(after, query, Integer.MAX_VALUE, sort, false);
          }
        }
      }
    }
  }

  TestUtil.shutdownExecutorService(service);
}
 
Example 26 (Project: lucene-solr, File: TestBooleanQuery.java)
public void testDeMorgan() throws Exception {
  Directory dir1 = newDirectory();
  RandomIndexWriter iw1 = new RandomIndexWriter(random(), dir1);
  Document doc1 = new Document();
  doc1.add(newTextField("field", "foo bar", Field.Store.NO));
  iw1.addDocument(doc1);
  IndexReader reader1 = iw1.getReader();
  iw1.close();

  Directory dir2 = newDirectory();
  RandomIndexWriter iw2 = new RandomIndexWriter(random(), dir2);
  Document doc2 = new Document();
  doc2.add(newTextField("field", "foo baz", Field.Store.NO));
  iw2.addDocument(doc2);
  IndexReader reader2 = iw2.getReader();
  iw2.close();

  BooleanQuery.Builder query = new BooleanQuery.Builder(); // Query: +foo -ba*
  query.add(new TermQuery(new Term("field", "foo")), BooleanClause.Occur.MUST);
  WildcardQuery wildcardQuery = new WildcardQuery(new Term("field", "ba*"));
  wildcardQuery.setRewriteMethod(MultiTermQuery.SCORING_BOOLEAN_REWRITE);
  query.add(wildcardQuery, BooleanClause.Occur.MUST_NOT);

  MultiReader multireader = new MultiReader(reader1, reader2);
  IndexSearcher searcher = newSearcher(multireader);
  assertEquals(0, searcher.search(query.build(), 10).totalHits.value);

  final ExecutorService es = Executors.newCachedThreadPool(new NamedThreadFactory("NRT search threads"));
  searcher = new IndexSearcher(multireader, es);
  if (VERBOSE)
    System.out.println("rewritten form: " + searcher.rewrite(query.build()));
  assertEquals(0, searcher.search(query.build(), 10).totalHits.value);
  es.shutdown();
  es.awaitTermination(1, TimeUnit.SECONDS);

  multireader.close();
  reader1.close();
  reader2.close();
  dir1.close();
  dir2.close();
}