下面列出了 org.apache.lucene.search.Query API 类的实例代码及用法示例,也可以点击链接到 GitHub 查看完整源代码。
public void testSynonymsBoost_singleTermQueryMultiTermSynonyms_shouldParseBoostedQuery() throws Exception {
    // Synonym mappings under test:
    //   leopard, big cat|0.8, bagheera|0.9, panthera pardus|0.85
    //   lion => panthera leo|0.9, simba leo|0.8, kimba|0.75
    // Each case: {query term, df field, expected parsed query string}.
    String[][] cases = {
        {"leopard", "t_pick_best_boosted_foo",
            "((t_pick_best_boosted_foo:\"big cat\")^0.8 | (t_pick_best_boosted_foo:bagheera)^0.9 | (t_pick_best_boosted_foo:\"panthera pardus\")^0.85 | t_pick_best_boosted_foo:leopard)"},
        {"leopard", "t_as_distinct_boosted_foo",
            "((t_as_distinct_boosted_foo:\"big cat\")^0.8 (t_as_distinct_boosted_foo:bagheera)^0.9 (t_as_distinct_boosted_foo:\"panthera pardus\")^0.85 t_as_distinct_boosted_foo:leopard)"},
        {"leopard", "t_as_same_term_boosted_foo",
            "((t_as_same_term_boosted_foo:\"big cat\")^0.8 (t_as_same_term_boosted_foo:bagheera)^0.9 (t_as_same_term_boosted_foo:\"panthera pardus\")^0.85 t_as_same_term_boosted_foo:leopard)"},
        {"lion", "t_pick_best_boosted_foo",
            "((t_pick_best_boosted_foo:\"panthera leo\")^0.9 | (t_pick_best_boosted_foo:\"simba leo\")^0.8 | (t_pick_best_boosted_foo:kimba)^0.75)"},
        {"lion", "t_as_distinct_boosted_foo",
            "((t_as_distinct_boosted_foo:\"panthera leo\")^0.9 (t_as_distinct_boosted_foo:\"simba leo\")^0.8 (t_as_distinct_boosted_foo:kimba)^0.75)"},
        {"lion", "t_as_same_term_boosted_foo",
            "((t_as_same_term_boosted_foo:\"panthera leo\")^0.9 (t_as_same_term_boosted_foo:\"simba leo\")^0.8 (t_as_same_term_boosted_foo:kimba)^0.75)"},
    };
    for (String[] c : cases) {
        Query q = QParser.getParser(c[0], req(params("df", c[1]))).getQuery();
        assertEquals(c[2], q.toString());
    }
}
@Override
public double scoreQuery(String qno, Query q) {
    // Query length proxy: number of space-separated tokens in the query's string form.
    final double queryLength = q.toString().split(" ").length;
    final TrecRuns topic = run.getTopic(qno);
    // D: score of the lowest-ranked retrieved document for this topic.
    final double D = topic.get(topic.size() - 1).getScore();
    // Handle the case that the query retrieves less than k documents.
    final int limit = Math.min(k, topic.size());
    double totalScore = 0;
    for (int rank = 0; rank < limit; rank++) {
        totalScore += sumScores(queryLength, topic.get(rank).getScore(), D);
    }
    // Normalization deliberately uses k (not limit), matching the original behavior:
    // short result lists are implicitly penalized.
    return (1.0 / k) * totalScore;
}
public void testGeoHash() throws IOException, ParseException {
    BaseFieldManager fieldManager = getFieldManager(new NoStopWordStandardAnalyzer());
    setupGisField(fieldManager);
    // Fix: the original called reader.close() only on the success path, leaking the
    // reader whenever search/parse threw or the assertion failed. try-with-resources
    // closes it on every path (DirectoryReader is Closeable).
    try (DirectoryReader reader = DirectoryReader.open(_dir)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        SuperParser parser = new SuperParser(Version.LUCENE_43, fieldManager, true, null, ScoreType.SUPER, new Term(
            BlurConstants.PRIME_DOC, BlurConstants.PRIME_DOC_VALUE));
        // Query by geohash term; exactly one indexed document should match.
        Query query = parser.parse("fam.geo:\"GeoHash(uvgb26kqsm0)\"");
        TopDocs topDocs = searcher.search(query, 10);
        assertEquals(1, topDocs.totalHits);
    }
}
@Test
public void shouldDeflateGzipAndParseJsonTermsFromRedisOnGetCommand() throws SyntaxError, IOException {
// Stub local params so the parser issues a Redis GET for "simpleKey",
// gunzips the payload, and deserializes it as a JSON array.
when(localParamsMock.get("command")).thenReturn("get");
when(localParamsMock.get("key")).thenReturn("simpleKey");
when(localParamsMock.get("compression")).thenReturn("gzip");
when(localParamsMock.get("serialization")).thenReturn("json");
when(localParamsMock.get(QueryParsing.V)).thenReturn("string_field");
// Redis hands back a gzipped JSON array of three numeric terms.
when(jedisMock.get(any(byte[].class))).thenReturn(Compressor.compressGzip("[100,200,300]".getBytes()));
when(requestMock.getSchema()).thenReturn(schema);
when(schema.getQueryAnalyzer()).thenReturn(new StandardAnalyzer());
redisQParser = new RedisQParser("string_field", localParamsMock, paramsMock, requestMock, commandHandler);
final Query query = redisQParser.parse();
// The parser must have fetched exactly the raw key bytes.
verify(jedisMock).get("simpleKey".getBytes());
IndexSearcher searcher = new IndexSearcher(new MultiReader());
final Set<Term> terms = extractTerms(searcher, query);
// [100,200,300] -> three distinct terms in the resulting query.
Assert.assertEquals(3, terms.size());
}
private void queryBasedPercolating(Engine.Searcher percolatorSearcher, PercolateContext context, QueryCollector percolateCollector) throws IOException {
    // Restrict percolation to documents of the percolator type, optionally
    // intersected (MUST + MUST) with the context's alias filter.
    final Query typeFilter = context.indexService().mapperService().documentMapper(TYPE_NAME).typeFilter();
    Query filter = typeFilter;
    if (context.aliasFilter() != null) {
        filter = new BooleanQuery.Builder()
            .add(context.aliasFilter(), BooleanClause.Occur.MUST)
            .add(typeFilter, BooleanClause.Occur.MUST)
            .build();
    }
    // Run the percolate query under the filter, then flush aggregation state.
    percolatorSearcher.searcher().search(Queries.filtered(context.percolateQuery(), filter), percolateCollector);
    percolateCollector.aggregatorCollector.postCollection();
    if (context.aggregations() != null) {
        aggregationPhase.execute(context);
    }
}
@Override
public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest req) {
// Captured so the anonymous QParser below can hand the plugin to Method.makeFilter.
final JoinQParserPlugin plugin = this;
return new QParser(qstr, localParams, params, req) {
@Override
public Query parse() throws SyntaxError {
// An explicit method=... local param takes precedence over everything else.
if (localParams != null && localParams.get(METHOD) != null) {
// TODO Make sure 'method' is valid value here and give users a nice error
final Method explicitMethod = Method.valueOf(localParams.get(METHOD));
return explicitMethod.makeFilter(this, plugin);
}
// Legacy join behavior before introduction of SOLR-13892
if(localParams!=null && localParams.get(ScoreJoinQParserPlugin.SCORE)!=null) {
// score=... present: delegate to the scoring join parser.
return new ScoreJoinQParserPlugin().createParser(qstr, localParams, params, req).parse();
} else {
// Default: plain index-based join.
return Method.index.makeFilter(this, plugin);
}
}
};
}
/**
 * Search for the user-specified query expression in the current page and,
 * when it matches, record a {@code LinkMatch} with the accumulated relevance.
 * @throws Exception if an error occurs.
 */
private void search() throws Exception {
    final QueryParser parser = new QueryParser("contents", new StandardAnalyzer());
    final Query q = parser.parse(query);
    // Index the fetched page content in-memory, then search it.
    final MemoryIndex index = new MemoryIndex();
    final Link link = new Link(url);
    final PageData pageData = new SimpleHttpClientParser().load(link);
    index.addField("contents", pageData.getData().toString(), new StandardAnalyzer());
    final IndexSearcher searcher = index.createSearcher();
    final Hits hits = searcher.search(q);
    @SuppressWarnings("rawtypes")
    final Iterator it = hits.iterator();
    // Sum per-hit relevance (score scaled to one decimal of a percent-like value).
    float relevance = 0f;
    boolean anyHit = false;
    while (it.hasNext()) {
        anyHit = true;
        final Hit hit = (Hit) it.next();
        relevance += ((float) Math.round(hit.getScore() * 1000)) / 10;
    }
    // Only record the link when at least one hit was found.
    if (anyHit) {
        matchedLinks.add(new LinkMatch(url, relevance));
    }
}
/**
 * Full-text search over document bodies.
 * Only {@code sBody} is used by this implementation; the other parameters are
 * part of the overridden signature but are ignored here.
 *
 * @param Type         document type (unused in this implementation)
 * @param sDocMetadata document metadata filter (unused in this implementation)
 * @param sBody        full-text expression to parse and search
 * @param sMetadata    metadata filter (unused in this implementation)
 * @return ids (F_ID) of matching documents; empty when nothing matches or on error
 * @throws PDException when the search fails
 */
@Override
protected ArrayList<String> Search(String Type, String sDocMetadata, String sBody, String sMetadata) throws PDException
{
// Fix: was a raw 'new ArrayList()' (unchecked warning); use the diamond operator.
ArrayList<String> Res=new ArrayList<>();
IndexSearcher isearcher=null;
try {
isearcher=SM.acquire();
// NOTE(review): default-locale lowercasing; consider Locale.ROOT if queries may
// contain locale-sensitive characters (e.g. Turkish 'I') — behavior kept as-is.
sBody=sBody.toLowerCase();
Query query = new QueryParser(F_FULLTEXT,analyzer).parse(sBody);
ScoreDoc[] hits = isearcher.search(query, MAXRESULTS).scoreDocs;
for (ScoreDoc hit : hits)
    Res.add(isearcher.doc(hit.doc).get(F_ID));
SM.release(isearcher);
} catch (Exception ex)
{
// Best-effort release: the searcher may or may not have been released above.
try {
SM.release(isearcher);
} catch (Exception e)
{}
PDException.GenPDException("Error_Searching_doc_FT:", ex.getLocalizedMessage());
}
return(Res);
}
@Override
public Query makeQuery(SpatialArgs args) {
    final SpatialOperation op = args.getOperation();
    final Shape shape = args.getShape();
    // Detail level derived from the requested distance-error percentage.
    final int detailLevel = grid.getLevelForDistance(args.resolveDistErr(ctx, distErrPct));
    if (op == SpatialOperation.Intersects) {
        // Grid-aligned shapes get a cheaper specialized query.
        return isGridAlignedShape(shape)
            ? makeGridShapeIntersectsQuery(shape)
            : new IntersectsPrefixTreeQuery(shape, getFieldName(), grid, detailLevel, prefixGridScanLevel);
    }
    if (op == SpatialOperation.IsWithin) {
        // -1 flag is slower but ensures correct results
        return new WithinPrefixTreeQuery(shape, getFieldName(), grid, detailLevel, prefixGridScanLevel, -1);
    }
    if (op == SpatialOperation.Contains) {
        return new ContainsPrefixTreeQuery(shape, getFieldName(), grid, detailLevel, multiOverlappingIndexedShapes);
    }
    throw new UnsupportedSpatialOperation(op);
}
public void testFiltersOutOfOrder1() {
    // The cache key must hash/compare identically regardless of the order of
    // the filter list.
    Sort sort = new Sort(new SortField("test", SortField.Type.INT));
    BooleanQuery.Builder builder = new BooleanQuery.Builder();
    builder.add(new TermQuery(new Term("test", "field")), Occur.MUST);
    Query mainQuery = builder.build();
    // Same two filters, constructed fresh in each order.
    List<Query> filtersAB = Arrays.<Query>asList(
        new TermQuery(new Term("test", "field")),
        new TermQuery(new Term("test2", "field2")));
    List<Query> filtersBA = Arrays.<Query>asList(
        new TermQuery(new Term("test2", "field2")),
        new TermQuery(new Term("test", "field")));
    QueryResultKey qrk1 = new QueryResultKey(mainQuery, filtersAB, sort, 1);
    QueryResultKey qrk2 = new QueryResultKey(mainQuery, filtersBA, sort, 1);
    assertKeyEquals(qrk1, qrk2);
}
private TopDocs knnSearch(String text) throws IOException {
    final BooleanQuery.Builder builder = new BooleanQuery.Builder();
    // Fuzzy-match the input text against every configured text field.
    final NearestFuzzyQuery fuzzy = new NearestFuzzyQuery(analyzer);
    for (String fieldName : textFieldNames) {
        fuzzy.addTerms(text, fieldName);
    }
    builder.add(fuzzy, BooleanClause.Occur.MUST);
    // Only consider documents that actually carry a class label.
    builder.add(new BooleanClause(new WildcardQuery(new Term(classFieldName, "*")), BooleanClause.Occur.MUST));
    // Optional user-supplied restriction query.
    if (query != null) {
        builder.add(query, BooleanClause.Occur.MUST);
    }
    return indexSearcher.search(builder.build(), k);
}
private boolean containsSuperQueries(Query query) {
    // Depth-first scan: a BooleanQuery contains a super query iff any clause does.
    // (BooleanQuery is checked first, preserving the original dispatch order.)
    if (query instanceof BooleanQuery) {
        for (BooleanClause clause : (BooleanQuery) query) {
            if (containsSuperQueries(clause.getQuery())) {
                return true;
            }
        }
        return false;
    }
    // Non-boolean leaf: only a SuperQuery itself qualifies.
    return query instanceof SuperQuery;
}
Query[] getDrillDownQueries() {
    // Build each stored per-dimension query builder into a concrete Query.
    // (Local renamed from the original, which shadowed the dimQueries field.)
    final Query[] built = new Query[dimQueries.size()];
    for (int i = 0; i < built.length; ++i) {
        built[i] = dimQueries.get(i).build();
    }
    return built;
}
@Override
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper) {
    // Null bounds mean an open-ended range on that side.
    final Integer lower = lowerTerm == null ? null : (int) parseValue(lowerTerm);
    final Integer upper = upperTerm == null ? null : (int) parseValue(upperTerm);
    return NumericRangeQuery.newIntRange(names().indexName(), numericPrecisionStep(),
        lower, upper, includeLower, includeUpper);
}
/**
 * Builds a copy of {@code query} containing the drill-down clauses of every
 * dimension except {@code excludedDimension}.
 *
 * @return the reduced query, or {@code null} when no dimension was actually
 *         excluded (the copy ended up with as many dims as {@code queries})
 */
private DrillDownQuery getDrillDownQuery(final DrillDownQuery query, Query[] queries,
final String excludedDimension) {
// Start from the same base query and re-add all non-excluded dims.
final DrillDownQuery ddl = new DrillDownQuery(config, query.getBaseQuery());
query.getDims().forEach((dim, pos) -> {
if (!dim.equals(excludedDimension))
ddl.add(dim, queries[pos]);
});
// Equal sizes mean excludedDimension was not present: signal with null.
return ddl.getDims().size() == queries.length ? null : ddl;
}
public void testEdismaxSimpleExtension() throws SyntaxError {
// Base qf plus per-language overrides (qf_fr / qf_en / qf_es).
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("q", "foo bar");
params.set("qf", "subject title^5");
params.set("qf_fr", "subject_fr title_fr^5");
params.set("qf_en", "subject_en title_en^5");
params.set("qf_es", "subject_es title_es^5");
// No language set: the plain qf fields are used (title boosted by 5, subject by 1).
MultilanguageQueryParser parser = new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params));
Query query = parser.parse();
assertNotNull(query);
assertTrue(containsClause(query, "title", "foo", 5, false));
assertTrue(containsClause(query, "title", "bar", 5, false));
assertTrue(containsClause(query, "subject", "foo", 1, false));
assertTrue(containsClause(query, "subject", "bar", 1, false));
// language=es switches the parser to the *_es override fields.
params.set("language", "es");
parser = new MultilanguageQueryParser("foo bar", new ModifiableSolrParams(), params, req(params));
query = parser.parse();
assertNotNull(query);
assertTrue(containsClause(query, "title_es", "foo", 5, false));
assertTrue(containsClause(query, "title_es", "bar", 5, false));
assertTrue(containsClause(query, "subject_es", "foo", 1, false));
assertTrue(containsClause(query, "subject_es", "bar", 1, false));
// FuzzyDismaxQParser: the final boolean flag is true only for "absence" —
// presumably marking a fuzzy clause; confirm against containsClause's signature.
FuzzyDismaxQParser parser2 = new FuzzyDismaxQParser("foo bar absence", new ModifiableSolrParams(), params, req(params));
query = parser2.parse();
assertNotNull(query);
assertTrue(containsClause(query, "title", "foo", 5, false));
assertTrue(containsClause(query, "title", "bar", 5, false));
assertTrue(containsClause(query, "title", "absence", 5, true));
}
static List<QueryCacheEntry> decompose(MonitorQuery mq, QueryDecomposer decomposer) {
    // One cache entry per decomposed subquery; entry ids are the monitor query's
    // id suffixed with "_0", "_1", ... in decomposition order.
    final List<QueryCacheEntry> entries = new ArrayList<>();
    int ordinal = 0;
    for (Query subquery : decomposer.decompose(mq.getQuery())) {
        entries.add(new QueryCacheEntry(mq.getId() + "_" + ordinal++, mq.getId(), subquery, mq.getMetadata()));
    }
    return entries;
}
@Override
public Query getFieldQuery(QParser parser, SchemaField field, String externalVal) {
    // Parse the raw value, normalize it to the default currency, then issue an
    // inclusive range query with identical endpoints — i.e. an equality match.
    final CurrencyValue parsed = CurrencyValue.parse(externalVal, defaultCurrency);
    final CurrencyValue normalized = parsed.convertTo(provider, defaultCurrency);
    return getRangeQueryInternal(parser, field, normalized, normalized, true, true);
}
@Test(expected = IllegalStateException.class)
public void testIndexSearcherNullness() throws IOException {
    final String text = "This is a test. Just a test highlighting without a searcher. Feel free to ignore.";
    final Query query = new TermQuery(new Term("body", "highlighting"));
    try (Directory directory = newDirectory();
         RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
         IndexReader indexReader = indexWriter.getReader()) {
        final IndexSearcher searcher = newSearcher(indexReader);
        final UnifiedHighlighter highlighter = new UnifiedHighlighter(searcher, indexAnalyzer);
        // Should throw the expected IllegalStateException.
        highlighter.highlightWithoutSearcher("body", query, text, 1);
    }
}
/**
 * Highlight (bold,color) query words in result-document. Sets the highlight
 * result from the content field or, when content yields nothing, from the
 * description field; always highlights the title.
 *
 * @param query          the executed search query (drives the QueryScorer)
 * @param analyzer       analyzer used to tokenize the stored field values
 * @param doc            the matching Lucene document
 * @param resultDocument target that receives the highlighted fragments
 * @throws IOException on token-stream read errors
 */
private void doHighlight(final Query query, final Analyzer analyzer, final Document doc, final ResultDocument resultDocument) throws IOException {
final Highlighter highlighter = new Highlighter(new SimpleHTMLFormatter(HIGHLIGHT_PRE_TAG, HIGHLIGHT_POST_TAG), new QueryScorer(query));
// Get the 3 best fragments of the content and separate them with "..."
try {
// highlight content
final String content = doc.get(AbstractOlatDocument.CONTENT_FIELD_NAME);
TokenStream tokenStream = analyzer.tokenStream(AbstractOlatDocument.CONTENT_FIELD_NAME, new StringReader(content));
String highlightResult = highlighter.getBestFragments(tokenStream, content, 3, HIGHLIGHT_SEPARATOR);
// if no highlightResult is in content => look in description
if (highlightResult.length() == 0) {
final String description = doc.get(AbstractOlatDocument.DESCRIPTION_FIELD_NAME);
tokenStream = analyzer.tokenStream(AbstractOlatDocument.DESCRIPTION_FIELD_NAME, new StringReader(description));
highlightResult = highlighter.getBestFragments(tokenStream, description, 3, HIGHLIGHT_SEPARATOR);
resultDocument.setHighlightingDescription(true);
}
resultDocument.setHighlightResult(highlightResult);
// highlight title (fragments joined by a single space)
final String title = doc.get(AbstractOlatDocument.TITLE_FIELD_NAME);
tokenStream = analyzer.tokenStream(AbstractOlatDocument.TITLE_FIELD_NAME, new StringReader(title));
final String highlightTitle = highlighter.getBestFragments(tokenStream, title, 3, " ");
resultDocument.setHighlightTitle(highlightTitle);
} catch (final InvalidTokenOffsetsException e) {
// Token offsets out of sync with stored text: log and leave highlights unset.
log.warn("", e);
}
}
/**
 * Constructs a query to retrieve documents that equal the input envelope.
 *
 * @param bbox the query envelope
 * @return the spatial query
 */
Query makeEquals(Rectangle bbox) {
    // All four edges of the indexed box must match the query box exactly:
    // docMinX = qMinX AND docMinY = qMinY AND docMaxX = qMaxX AND docMaxY = qMaxY
    final Query eqMinX = makeNumberTermQuery(field_minX, bbox.getMinX());
    final Query eqMinY = makeNumberTermQuery(field_minY, bbox.getMinY());
    final Query eqMaxX = makeNumberTermQuery(field_maxX, bbox.getMaxX());
    final Query eqMaxY = makeNumberTermQuery(field_maxY, bbox.getMaxY());
    return makeQuery(BooleanClause.Occur.MUST, eqMinX, eqMinY, eqMaxX, eqMaxY);
}
/**
 * Return a String array of the fields to be highlighted.
 * Falls back to the programmatic defaults, or the default search field if the list of fields
 * is not specified in either the handler configuration or the request.
 * @param query The current Query
 * @param request The current SolrQueryRequest
 * @param defaultFields Programmatic default highlight fields, used if nothing is specified in the handler config or the request.
 * @return the trimmed field names to highlight; possibly empty, never null
 */
public String[] getHighlightFields(Query query, SolrQueryRequest request, String[] defaultFields) {
String fields[] = request.getParams().getParams(HighlightParams.FIELDS);
// if no fields specified in the request, or the handler, fall back to programmatic default, or default search field.
if(emptyArray(fields)) {
// use default search field from request if highlight fieldlist not specified.
if (emptyArray(defaultFields)) {
String defaultSearchField = request.getParams().get(CommonParams.DF);
fields = null == defaultSearchField ? new String[]{} : new String[]{defaultSearchField};
} else {
fields = defaultFields;
}
} else {
// Expand any wildcard patterns in the requested field list against the
// stored highlight field names; LinkedHashSet preserves first-seen order.
Set<String> expandedFields = new LinkedHashSet<String>();
Collection<String> storedHighlightFieldNames = request.getSearcher().getDocFetcher().getStoredHighlightFieldNames();
for (String field : fields) {
expandWildcardsInHighlightFields(
expandedFields,
storedHighlightFieldNames,
SolrPluginUtils.split(field));
}
fields = expandedFields.toArray(new String[]{});
}
// Trim them now in case they haven't been yet. Not needed for all code-paths above but do it here.
for (int i = 0; i < fields.length; i++) {
fields[i] = fields[i].trim();
}
return fields;
}
@Override
protected Query getFieldQuery(final String field, final String queryText, boolean quoted)
throws ParseException {
// Split an "ext:field"-style name: per Lucene's Extensions.Pair, 'cur' holds
// the remaining field name and 'cud' the extension key — confirm against the
// Extensions implementation in use.
final Pair<String,String> splitExtensionField = this.extensions
.splitExtensionField(defaultField, field);
final ParserExtension extension = this.extensions
.getExtension(splitExtensionField.cud);
if (extension != null) {
// A registered extension handles this field's query itself.
return extension.parse(new ExtensionQuery(this, splitExtensionField.cur,
queryText));
}
// No extension registered: fall back to the standard field query.
return super.getFieldQuery(field, queryText, quoted);
}
public void testStopRewrite() throws Exception {
// Dummy query whose equals()/hashCode() throw: constructing a FieldQuery over
// it must succeed without ever invoking either method (the AssertionError
// would fail the test otherwise).
Query q = new Query() {
@Override
public String toString(String field) {
return "DummyQuery";
}
@Override
public void visit(QueryVisitor visitor) {
// No sub-queries to report.
}
@Override
public boolean equals(Object o) {
throw new AssertionError();
}
@Override
public int hashCode() {
throw new AssertionError();
}
};
make1d1fIndex( "a" );
assertNotNull(reader);
// Must complete without throwing — verifies FieldQuery stops processing such
// unknown query types rather than comparing/hashing them.
new FieldQuery(q, reader, true, true );
}
public void testWithScore() throws Exception {
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
FacetsConfig config = new FacetsConfig();
// Four docs: price i+1; facet "a/0" for even i, "a/1" for odd i.
for (int i = 0; i < 4; i++) {
Document doc = new Document();
doc.add(new NumericDocValuesField("price", (i+1)));
doc.add(new FacetField("a", Integer.toString(i % 2)));
iw.addDocument(config.build(taxoWriter, doc));
}
DirectoryReader r = DirectoryReader.open(iw);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
// true => keep scores so they can be summed per category below.
FacetsCollector fc = new FacetsCollector(true);
// score documents by their 'price' field - makes asserting the correct counts for the categories easier
Query q = new FunctionQuery(new LongFieldSource("price"));
FacetsCollector.search(newSearcher(r), q, 10, fc);
Facets facets = new TaxonomyFacetSumValueSource(taxoReader, config, fc, DoubleValuesSource.SCORES);
// Summed scores: category "1" gets 2+4=6, category "0" gets 1+3=4, total 10.
assertEquals("dim=a path=[] value=10.0 childCount=2\n 1 (6.0)\n 0 (4.0)\n", facets.getTopChildren(10, "a").toString());
iw.close();
IOUtils.close(taxoWriter, taxoReader, taxoDir, r, indexDir);
}
@Test
public void testStopWordRemoval() throws Exception {
    // "stopA" is a stop word, so only "a" and "b" survive as SHOULD clauses.
    final float tiebreaker = (float) Math.random();
    final Query parsed = buildWithStopWords("a stopA b", tiebreaker, "f1");
    assertThat(parsed,
        bq(1f,
            dtq(Occur.SHOULD, 1f, "f1", "a"),
            dtq(Occur.SHOULD, 1f, "f1", "b")));
}
public static <T extends Query> T assertDisjunctionSubQuery(Query query, Class<T> subqueryType, int i) {
    // The query must be a DisjunctionMaxQuery holding at least i+1 disjuncts,
    // with disjunct i of the requested type; that disjunct is returned, cast.
    assertThat(query, instanceOf(DisjunctionMaxQuery.class));
    final DisjunctionMaxQuery dismax = (DisjunctionMaxQuery) query;
    assertThat(dismax.getDisjuncts().size(), greaterThan(i));
    assertThat(dismax.getDisjuncts().get(i), instanceOf(subqueryType));
    return subqueryType.cast(dismax.getDisjuncts().get(i));
}
void assertHits(Query q, float scores[]) throws Exception {
// Expected: document i (in "id" order) scores exactly scores[i].
ScoreDoc expected[] = new ScoreDoc[scores.length];
int expectedDocs[] = new int[scores.length];
for (int i = 0; i < expected.length; i++) {
expectedDocs[i] = i;
expected[i] = new ScoreDoc(i, scores[i]);
}
// Sort by id so result order lines up with the expected doc order.
TopDocs docs = searcher.search(q, documents.size(),
new Sort(new SortField("id", SortField.Type.STRING)), true);
// Triple-check: hit set, per-hit scores, and explanation consistency.
CheckHits.checkHits(random(), q, "", searcher, expectedDocs);
CheckHits.checkHitsQuery(q, expected, docs.scoreDocs, expectedDocs);
CheckHits.checkExplanations(q, "", searcher);
}
@Override
public Query parse(String query) throws ParseException{
Query q = super.parse(query);
if(defaultAndnStopword){
// Re-parse with inQueryFix set: the flag presumably makes the superclass
// produce a stopword-tolerant variant of the parse — confirm in the parser.
// NOTE(review): inQueryFix is never reset here; verify that is intentional.
inQueryFix = true;
Query fix = super.parse(query);
// OR the strict parse and the fixed parse so either form may match.
List<BooleanClause> clauses = new ArrayList<BooleanClause>();
clauses.add(new BooleanClause(q,Occur.SHOULD));
clauses.add(new BooleanClause(fix,Occur.SHOULD));
return getBooleanQuery(clauses);
}
return q;
}
private Query getPointGroupQuery(SchemaField sf,
                                 int size,
                                 LongHashSet groupSet) {
    // Render every collected numeric group value as a string, then build a
    // set (terms) query over those values for this field.
    final FieldType ft = sf.getType();
    final List<String> values = new ArrayList<>(size);
    final Iterator<LongCursor> it = groupSet.iterator();
    while (it.hasNext()) {
        values.add(numericToString(ft, it.next().value));
    }
    return ft.getSetQuery(null, sf, values);
}