org.apache.lucene.search.highlight.SimpleFragmenter#org.apache.lucene.queryparser.classic.MultiFieldQueryParser源码实例Demo

下面列出了org.apache.lucene.search.highlight.SimpleFragmenter#org.apache.lucene.queryparser.classic.MultiFieldQueryParser 实例代码,或者点击链接到github查看源代码,也可以在右侧发表评论。

源代码1 项目: bioasq   文件: LuceneDocumentRetrievalExecutor.java
@Override
public void initialize(UimaContext context) throws ResourceInitializationException {
  super.initialize(context);
  // Maximum number of hits to request per search (configurable, defaults to 100).
  hits = UimaContextHelper.getConfigParameterIntValue(context, "hits", 100);
  // Build the query-string constructor from configuration.
  constructor = UimaContextHelper.createObjectFromConfigParameter(context,
          "query-string-constructor", "query-string-constructor-params",
          BooleanBagOfPhraseQueryStringConstructor.class, QueryStringConstructor.class);
  // Lucene side: analyzer, multi-field parser, then index reader/searcher.
  Analyzer queryAnalyzer = UimaContextHelper.createObjectFromConfigParameter(context,
          "query-analyzer", "query-analyzer-params", StandardAnalyzer.class, Analyzer.class);
  String[] searchFields = UimaContextHelper.getConfigParameterStringArrayValue(context, "fields");
  parser = new MultiFieldQueryParser(searchFields, queryAnalyzer);
  String indexPath = UimaContextHelper.getConfigParameterStringValue(context, "index");
  try {
    reader = DirectoryReader.open(FSDirectory.open(Paths.get(indexPath)));
  } catch (IOException e) {
    // Surface index-open failures as initialization errors, preserving the cause.
    throw new ResourceInitializationException(e);
  }
  searcher = new IndexSearcher(reader);
  // Optional field-name / URI configuration; each defaults to null when absent.
  idFieldName = UimaContextHelper.getConfigParameterStringValue(context, "id-field", null);
  titleFieldName = UimaContextHelper.getConfigParameterStringValue(context, "title-field", null);
  textFieldName = UimaContextHelper.getConfigParameterStringValue(context, "text-field", null);
  uriPrefix = UimaContextHelper.getConfigParameterStringValue(context, "uri-prefix", null);
}
 
@Override
public SearchResult search(io.gravitee.rest.api.service.search.query.Query query) throws TechnicalException {
    // Parse the (escaped) user input against both page fields, allowing mild fuzziness.
    QueryParser pageParser = new MultiFieldQueryParser(new String[]{ "name", "content" }, analyzer);
    pageParser.setFuzzyMinSim(0.6f);

    try {
        Query parsed = pageParser.parse(QueryParserBase.escape(query.getQuery()));

        // Restrict the parsed query to documents of the page type.
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.add(parsed, BooleanClause.Occur.MUST);
        builder.add(new TermQuery(new Term(FIELD_TYPE, FIELD_TYPE_VALUE)), BooleanClause.Occur.MUST);

        return search(builder.build());
    } catch (ParseException pe) {
        logger.error("Invalid query to search for page documents", pe);
        throw new TechnicalException("Invalid query to search for page documents", pe);
    }
}
 
源代码3 项目: lucene4ir   文件: FieldedRetrievalApp.java
/**
 * Performs fielded (multi-field, boosted) retrieval for the query provided.
 *
 * @param qno the query identifier, used only for log output
 * @param queryTerms the raw query string to parse
 * @return the matching hits, or {@code null} if parsing failed or the search
 *         threw an {@link IOException}
 */
public ScoreDoc[] runQuery(String qno, String queryTerms){
    ScoreDoc[] hits = null;
    // Build the parallel field-name array and per-field boost map from the
    // configured field list.
    String[] fields = new String[fl.fields.size()];
    Map<String, Float> boosts = new HashMap<>(fl.fields.size());
    int i = 0;
    for (Field f : fl.fields) {
        fields[i] = f.fieldName;
        boosts.put(f.fieldName, f.fieldBoost);
        i++;
    }
    try {
        MultiFieldQueryParser mfq = new MultiFieldQueryParser(fields, analyzer, boosts);
        Query q = mfq.parse(queryTerms);
        System.out.println(qno + ": " + q.toString());
        try {
            TopDocs results = searcher.search(q, p.maxResults);
            hits = results.scoreDocs;
        } catch (IOException ioe) {
            System.out.println(" caught a " + ioe.getClass() +
                    "\n with message: " + ioe.getMessage());
        }
    } catch (ParseException pe){
        // BUGFIX: report which query failed and why, instead of silently
        // discarding all diagnostic detail from the ParseException.
        System.out.println("Can't parse query " + qno + ": " + pe.getMessage());
    }
    return hits;
}
 
源代码4 项目: lucene-geo-gazetteer   文件: GeoNameResolver.java
/**
 * Looks up each location name in the gazetteer index and keeps the best candidates.
 */
private HashMap<String, List<Location>> resolveEntities(List<String> locationNames,
													  int count, IndexReader reader) throws IOException {
	if (locationNames.size() >= 200)
		hitsPerPage = 5; // avoid heavy computation
	IndexSearcher searcher = new IndexSearcher(reader);

	HashMap<String, List<Location>> allCandidates = new HashMap<String, List<Location>>();

	for (String name : locationNames) {
		if (allCandidates.containsKey(name)) {
			continue; // each distinct name is resolved only once
		}
		try {
			//query is wrapped in additional quotes (") to avoid query tokenization on space
			Query q = new MultiFieldQueryParser(new String[] { FIELD_NAME_NAME,
					FIELD_NAME_ALTERNATE_NAMES }, analyzer).parse(String.format("\"%s\"", name));

			Sort sort = new Sort(populationSort);
			//Fetch 3 times desired values, these will be sorted on code and only desired number will be kept
			ScoreDoc[] hits = searcher.search(q, hitsPerPage * 3, sort).scoreDocs;

			getMatchingCandidates(searcher, allCandidates, name, hits);
		} catch (org.apache.lucene.queryparser.classic.ParseException e) {
			e.printStackTrace();
		}
	}

	HashMap<String, List<Location>> resolvedEntities = new HashMap<String, List<Location>>();
	pickBestCandidates(resolvedEntities, allCandidates, count);
	return resolvedEntities;
}
 
@Override
public SearchResult search(io.gravitee.rest.api.service.search.query.Query query) throws TechnicalException {
    // Fuzzy, leading-wildcard-enabled parser over every user-related field.
    QueryParser userParser = new MultiFieldQueryParser(new String[]{
            "firstname",
            "lastname",
            "displayname",
            "displayname_split",
            "email"
    }, analyzer);
    userParser.setFuzzyMinSim(0.6f);
    userParser.setAllowLeadingWildcard(true);

    try {
        Query parsed = userParser.parse(QueryParserBase.escape(query.getQuery()));

        // Combine the parsed query with explicit contains-style wildcards,
        // any of which may match (SHOULD).
        BooleanQuery.Builder userFieldsQuery = new BooleanQuery.Builder();
        userFieldsQuery.add(parsed, BooleanClause.Occur.SHOULD);
        for (String field : new String[]{ "firstname", "lastname", "displayname", "email" }) {
            userFieldsQuery.add(new WildcardQuery(new Term(field, '*' + query.getQuery() + '*')), BooleanClause.Occur.SHOULD);
        }

        // Require at least one field match AND the user document type.
        BooleanQuery.Builder userQuery = new BooleanQuery.Builder();
        userQuery.add(userFieldsQuery.build(), BooleanClause.Occur.MUST);
        userQuery.add(new TermQuery(new Term(FIELD_TYPE, FIELD_TYPE_VALUE)), BooleanClause.Occur.MUST);

        return search(userQuery.build(), query.getPage());
    } catch (ParseException pe) {
        logger.error("Invalid query to search for user documents", pe);
        throw new TechnicalException("Invalid query to search for user documents", pe);
    }
}
 
源代码6 项目: adam   文件: UserProfileDataQueryMapper.java
@Override
public Query convertToLuceneQuery(@Nonnull UserProfileDataQuery query) {
    // Require all terms (AND) across every field derived from the query.
    final QueryParser parser = new MultiFieldQueryParser(LUCENE_VERSION, toFieldsArray(query), _luceneAnalyzerFactory.createAnalyzer());
    parser.setDefaultOperator(AND);
    final String searchTerm = query.getSearchTerm();
    // A missing search term is parsed as the empty string.
    final String termToParse = searchTerm != null ? searchTerm : "";
    try {
        return parser.parse(termToParse);
    } catch (final ParseException e) {
        throw new RuntimeException("Unable to parse query: " + searchTerm, e);
    }
}
 
源代码7 项目: orientdb-lucene   文件: LuceneVsLuceneTest.java
// Sanity check: querying a plain Lucene index directly must return exactly the
// same number of hits (and the same scores, in order) as querying the same data
// through OrientDB's LUCENE operator.
@Test
public void testLuceneVsLucene() throws IOException, ParseException {
  InputStream stream = ClassLoader.getSystemResourceAsStream("testLuceneIndex.sql");

  // Populate the database from the bundled SQL script.
  databaseDocumentTx.command(new OCommandScript("sql", getScriptFromStream(stream))).execute();

  // Mirror every Song title into a standalone Lucene index.
  for (ODocument oDocument : databaseDocumentTx.browseClass("Song")) {

    String title = oDocument.field("title");
    if (title != null) {
      Document d = new Document();
      d.add(new Field("title", title, Field.Store.NO, Field.Index.ANALYZED));

      indexWriter.addDocument(d);

    }
  }

  indexWriter.close();
  IndexReader reader = DirectoryReader.open(getDirectory());
  IndexSearcher searcher = new IndexSearcher(reader);
  Query query = new MultiFieldQueryParser(OLuceneIndexManagerAbstract.LUCENE_VERSION, new String[] { "title" },
      new StandardAnalyzer(OLuceneIndexManagerAbstract.LUCENE_VERSION)).parse("down the");
  final TopDocs docs = searcher.search(query, Integer.MAX_VALUE);
  ScoreDoc[] hits = docs.scoreDocs;
  List<ODocument> oDocs = databaseDocumentTx.query(new OSQLSynchQuery<ODocument>(
      "select *,$score from Song where title LUCENE \"down the\""));
  // Both paths must agree on the result count...
  Assert.assertEquals(oDocs.size(), hits.length);

  // ...and on each hit's score, position by position.
  // NOTE(review): this assumes both result sets come back in the same (score)
  // order — confirm Lucene and OrientDB apply identical default sorting here.
  int i = 0;
  for (ScoreDoc hit : hits) {
    Assert.assertEquals(oDocs.get(i).field("$score"), hit.score);
    i++;
  }
  reader.close();

}
 
源代码8 项目: artifact-listener   文件: ArtifactDaoImpl.java
/**
 * Builds a full-text query matching non-deprecated artifacts whose artifactId
 * or groupId is similar to the given search term.
 *
 * @param searchTerm the raw user input; blank input yields {@code null}
 * @return the full-text query, or {@code null} when the search term is blank
 * @throws ServiceException if the similarity query cannot be parsed
 */
private FullTextQuery getSearchRecommendedQuery(String searchTerm) throws ServiceException {
	if (!StringUtils.hasText(searchTerm)) {
		return null;
	}
	FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(getEntityManager());

	QueryBuilder artifactQueryBuilder = fullTextEntityManager.getSearchFactory().buildQueryBuilder()
			.forEntity(Artifact.class).get();

	BooleanJunction<?> booleanJunction = artifactQueryBuilder.bool();

	// Only recommend artifacts that are not deprecated.
	booleanJunction.must(artifactQueryBuilder
			.keyword()
			.onField(Binding.artifact().deprecationStatus().getPath())
			.matching(ArtifactDeprecationStatus.NORMAL)
			.createQuery());

	try {
		// Expand the raw term into a similarity (fuzzy) query string.
		searchTerm = LuceneUtils.getSimilarityQuery(searchTerm, 2);
		String[] searchedFields = {
				Binding.artifact().artifactId().getPath(),
				Binding.artifact().group().groupId().getPath()
		};
		Analyzer analyzer = Search.getFullTextEntityManager(getEntityManager()).getSearchFactory().getAnalyzer(Artifact.class);

		MultiFieldQueryParser parser = new MultiFieldQueryParser(searchedFields, analyzer);
		parser.setDefaultOperator(MultiFieldQueryParser.AND_OPERATOR);

		BooleanQuery similarityQuery = new BooleanQuery();
		similarityQuery.add(parser.parse(searchTerm), BooleanClause.Occur.MUST);

		booleanJunction.must(similarityQuery);
	} catch (ParseException e) {
		throw new ServiceException(String.format("Error parsing request: %1$s", searchTerm), e);
	}

	return fullTextEntityManager.createFullTextQuery(booleanJunction.createQuery(), Artifact.class);
}
 
源代码9 项目: ml-blog   文件: LuceneService.java
/**
 * Full-text search over post titles with highlighting and manual pagination.
 *
 * @param keyword  the search keyword, parsed against the "title" field
 * @param pageNum  1-based page number; values below 1 are treated as 1
 * @param pageSize number of posts per page
 * @return a page of published posts (status == 1) whose titles match the
 *         keyword, with the matched fragment highlighted
 * @throws GlobalException if index access, parsing, or highlighting fails
 */
public PageVo queryByPage(String keyword, int pageNum, int pageSize) throws GlobalException {
    try {
        IndexSearcher searcher = this.getIndexSearcher();
        Analyzer analyzer = new ComplexAnalyzer();

        String[] fields = {"title"}; // multi-field query to ease future extension
        MultiFieldQueryParser multiFieldQueryParser = new MultiFieldQueryParser(fields, analyzer);
        Query query = multiFieldQueryParser.parse(keyword);

        // At most 100 documents are ever fetched from the index.
        TopDocs topDocs = searcher.search(query, 100);

        // Highlighter: wrap matched terms in a red <font> tag.
        Formatter formatter = new SimpleHTMLFormatter("<font color='red'>", "</font>");
        Scorer scorer = new QueryScorer(query);
        Highlighter highlighter = new Highlighter(formatter, scorer);

        List<Post> list = new ArrayList<>();
        Post post;
        ScoreDoc[] scoreDocs = topDocs.scoreDocs;

        if (pageNum < 1) {
            pageNum = 1;
        }

        if (scoreDocs.length == 0) {
            return new PageVo(pageNum, pageSize, scoreDocs.length, null);
        }

        int start = (pageNum - 1) * pageSize;
        // BUGFIX: bound the page by the number of documents actually retrieved,
        // not by totalHits — totalHits can exceed the 100 docs fetched above,
        // which previously caused an ArrayIndexOutOfBoundsException on late pages.
        int end = Math.min(start + pageSize, scoreDocs.length);

        for (int i = start; i < end; i++) {
            Document document = searcher.doc(scoreDocs[i].doc);
            // Only expose published posts (status == 1).
            if (Integer.parseInt(document.get("status")) == 1) {
                post = new Post();
                String titleHighLight = highlighter.getBestFragment(analyzer, "title", document.get("title"));
                post.setId(Integer.parseInt(document.get("postId")))
                        .setTitle(titleHighLight)
                        .setPostUrl(document.get("postUrl"))
                        .setCategoryName(document.get("categoryName"))
                        .setPublishDate(DateUtil.parseToDate(document.get("publishDate"), "yyyy-MM-dd"));
                list.add(post);
            }
        }

        return new PageVo(pageNum, pageSize, scoreDocs.length, list);
    } catch (Exception e) {
        throw new GlobalException(500, e.toString());
    }
}
 
源代码10 项目: scava   文件: SORecommender.java
/**
 * Recommends Stack Overflow posts for the given query by searching the local
 * Lucene index built over tagged compilation-unit features.
 *
 * @param rec_query the query carrying the compilation unit and an optional
 *                  per-feature selection of boost operators
 * @return a recommendation with up to {@code luceneTreshold} items; empty on failure
 */
@Override
public Recommendation getRecommendation(Query rec_query) {
	Recommendation rec = new Recommendation();
	try {
		// Build the boosted Lucene query string, defaulting every feature
		// category to OR when no explicit selection was provided.
		String compUnit;
		if (rec_query.getSoRecommendationSelection() == null
				|| rec_query.getSoRecommendationSelection().isEmpty()) {
			Map<String, String> param = new HashMap<String, String>();
			param.put("ImportDeclaration", "OR");
			param.put("MethodDeclaration", "OR");
			param.put("MethodInvocation", "OR");
			param.put("VariableDeclaration", "OR");
			param.put("ClassInstance", "OR");
			param.put("VariableDeclarationType", "OR");
			compUnit = makeBoostedQuery(rec_query.getCompilationUnit(), param);
		} else {
			compUnit = makeBoostedQuery(rec_query.getCompilationUnit(), rec_query.getSoRecommendationSelection());
		}

		File indexDirectory = new File(INDEX_DIRECTORY);
		Directory indexDir = FSDirectory.open(Paths.get(indexDirectory.getAbsolutePath()));
		// BUGFIX: close the reader when done — it was previously leaked.
		try (IndexReader reader = DirectoryReader.open(indexDir)) {
			IndexSearcher searcher = new IndexSearcher(reader);
			List<String> fields2 = getAllIndexTags(INDEX_DIRECTORY);
			String[] fields = fields2.toArray(new String[fields2.size()]);
			Analyzer analzer = new StandardAnalyzer();
			MultiFieldQueryParser qp = new MultiFieldQueryParser(fields, analzer);
			org.apache.lucene.search.Query q = qp.parse(compUnit);
			TopDocs results = executeQuery(q);
			if (results != null) {
				// Keep at most luceneTreshold top-scoring posts.
				// (Removed unused debug collections of explanations and post ids.)
				int counter = 0;
				for (ScoreDoc result : results.scoreDocs) {
					if (counter < luceneTreshold) {
						RecommendationItem ri = new RecommendationItem();
						org.apache.lucene.document.Document d = searcher.doc(result.doc);
						ri.setApiDocumentationLink(d.get("ID_POST"));
						ri.setSignificance(result.score);
						counter += 1;
						rec.getRecommendationItems().add(ri);
					}
				}
			}
		}
	} catch (IOException | ParseException e) {
		// BUGFIX: log the throwable so the stack trace is preserved, instead
		// of only the (possibly null) message.
		logger.error("Failed to compute SO recommendation", e);
	}
	return rec;
}
 
// Global search across API documents and their pages: the user query is matched
// against API fields (boosted 2x in the combined query) and page fields,
// combined with optional caller-supplied filters.
@Override
public SearchResult search(io.gravitee.rest.api.service.search.query.Query query) throws TechnicalException {
    // Parser over all API fields, with per-field boosts, fuzziness and leading wildcards.
    MultiFieldQueryParser apiParser = new MultiFieldQueryParser(new String[]{
            "id",
            "name",
            "name_lowercase",
            "name_split",
            "description",
            "ownerName",
            "ownerMail",
            "labels",
            "tags",
            "categories",
            "paths",
            "paths_split",
            "hosts",
            "hosts_split",
    }, analyzer, API_FIELD_BOOST);
    apiParser.setFuzzyMinSim(0.6f);
    apiParser.setAllowLeadingWildcard(true);

    // Separate parser over page fields with their own boost table.
    QueryParser pageParser = new MultiFieldQueryParser(new String[]{
            "name",
            "content"
    }, analyzer, PAGE_FIELD_BOOST);
    pageParser.setFuzzyMinSim(0.6f);
    pageParser.setAllowLeadingWildcard(true);

    try {
        // Escape once, then parse the same input through both parsers.
        String inputQuery = QueryParserBase.escape(query.getQuery());
        Query parse = apiParser.parse(inputQuery);
        Query parsePage = pageParser.parse(inputQuery);

        Query apisFilter = getApisFilter(FIELD_ID, query.getFilters());

        // Search in API fields
        BooleanQuery.Builder apiQuery = new BooleanQuery.Builder();
        BooleanQuery.Builder apiFieldsQuery = new BooleanQuery.Builder();

        // Parsed query OR explicit contains-style wildcards on key fields (SHOULD).
        apiFieldsQuery.add(parse, BooleanClause.Occur.SHOULD);
        apiFieldsQuery.add(new WildcardQuery(new Term("name", '*' + query.getQuery() + '*')), BooleanClause.Occur.SHOULD);
        apiFieldsQuery.add(new WildcardQuery(new Term("name_lowercase", '*' + query.getQuery().toLowerCase() + '*')), BooleanClause.Occur.SHOULD);
        apiFieldsQuery.add(new WildcardQuery(new Term("paths", '*' + query.getQuery() + '*')), BooleanClause.Occur.SHOULD);
        apiFieldsQuery.add(new WildcardQuery(new Term("hosts", '*' + query.getQuery() + '*')), BooleanClause.Occur.SHOULD);

        // Require at least one field match AND the API document type (AND filter, if any).
        apiQuery.add(apiFieldsQuery.build(), BooleanClause.Occur.MUST);
        apiQuery.add(new TermQuery(new Term(FIELD_TYPE, FIELD_API_TYPE_VALUE)), BooleanClause.Occur.MUST);
        if (apisFilter != null) {
            apiQuery.add(apisFilter, BooleanClause.Occur.MUST);
        }

        // Search in page fields
        BooleanQuery.Builder pageQuery = new BooleanQuery.Builder();
        pageQuery.add(parsePage, BooleanClause.Occur.MUST);
        pageQuery.add(new TermQuery(new Term(FIELD_TYPE, FIELD_PAGE_TYPE_VALUE)), BooleanClause.Occur.MUST);

        // Pages must match the API filter, or at least reference some API.
        apisFilter = getApisFilter(FIELD_API_TYPE_VALUE, query.getFilters());
        if (apisFilter != null) {
            pageQuery.add(apisFilter, BooleanClause.Occur.MUST);
        } else {
            pageQuery.add(new DocValuesFieldExistsQuery(FIELD_API_TYPE_VALUE), BooleanClause.Occur.MUST);
        }

        // Combine: API matches boosted 2x over page matches.
        BooleanQuery.Builder mainQuery = new BooleanQuery.Builder();
        mainQuery.add(new BoostQuery(apiQuery.build(), 2.0f), BooleanClause.Occur.SHOULD);
        //mainQuery.add(new BoostQuery(pathQuery.build(), 4.0f), BooleanClause.Occur.SHOULD);
        mainQuery.add(pageQuery.build(), BooleanClause.Occur.SHOULD);

        // Manage filters
        if (query.getFilters() != null) {
            BooleanQuery.Builder filtersQuery = new BooleanQuery.Builder();
            final boolean[] hasClause = {false};
            query.getFilters().forEach(new BiConsumer<String, Object>() {
                @Override
                public void accept(String field, Object value) {
                    // NOTE(review): collection-valued filters are silently ignored
                    // here — confirm whether that is intentional.
                    if (Collection.class.isAssignableFrom(value.getClass())) {
                    } else {
                        filtersQuery.add(new TermQuery(new Term(field, QueryParserBase.escape((String) value))), BooleanClause.Occur.MUST);
                        hasClause[0] = true;
                    }
                }
            });

            if (hasClause[0]) {
                mainQuery.add(filtersQuery.build(), BooleanClause.Occur.MUST);
            }

        }
        return search(mainQuery.build());
    } catch (ParseException pe) {
        logger.error("Invalid query to search for API documents", pe);
        throw new TechnicalException("Invalid query to search for API documents", pe);
    }
}
 
源代码12 项目: wallride   文件: CategoryRepositoryImpl.java
@Override
public Page<Category> search(CategorySearchRequest request, Pageable pageable) {
	FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
	QueryBuilder qb = fullTextEntityManager.getSearchFactory()
			.buildQueryBuilder()
			.forEntity(Category.class)
			.get();

	@SuppressWarnings("rawtypes")
	BooleanJunction<BooleanJunction> junction = qb.bool();
	junction.must(qb.all().createQuery());

	if (StringUtils.hasText(request.getKeyword())) {
		// Parse the keyword over the name field; retry with escaping on failure.
		Analyzer analyzer = fullTextEntityManager.getSearchFactory().getAnalyzer("synonyms");
		MultiFieldQueryParser parser = new MultiFieldQueryParser(new String[] { "name" }, analyzer);
		parser.setDefaultOperator(QueryParser.Operator.AND);
		Query keywordQuery;
		try {
			keywordQuery = parser.parse(request.getKeyword());
		}
		catch (ParseException e1) {
			try {
				keywordQuery = parser.parse(QueryParser.escape(request.getKeyword()));
			}
			catch (ParseException e2) {
				throw new RuntimeException(e2);
			}
		}
		junction.must(keywordQuery);
	}

	if (StringUtils.hasText(request.getLanguage())) {
		junction.must(qb.keyword().onField("language").matching(request.getLanguage()).createQuery());
	}

	Query searchQuery = junction.createQuery();

	Session session = (Session) entityManager.getDelegate();
	Criteria criteria = session.createCriteria(Category.class);

	Sort sort = new Sort(new SortField("sortName", SortField.Type.STRING));

	FullTextQuery persistenceQuery = fullTextEntityManager
			.createFullTextQuery(searchQuery, Category.class)
			.setCriteriaQuery(criteria)
			.setSort(sort);
	if (pageable.isPaged()) {
		persistenceQuery.setFirstResult((int) pageable.getOffset());
		persistenceQuery.setMaxResults(pageable.getPageSize());
	}

	int total = persistenceQuery.getResultSize();

	@SuppressWarnings("unchecked")
	List<Category> results = persistenceQuery.getResultList();
	return new PageImpl<>(results, pageable, total);
}
 
源代码13 项目: wallride   文件: UserRepositoryImpl.java
/**
 * Assembles the full-text query for users: an optional keyword over loginId and
 * name fields (AND semantics, with an escaped retry) plus required role filters.
 */
private FullTextQuery buildFullTextQuery(UserSearchRequest request, Pageable pageable, Criteria criteria) {
	FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
	QueryBuilder qb = fullTextEntityManager.getSearchFactory()
			.buildQueryBuilder()
			.forEntity(User.class)
			.get();

	@SuppressWarnings("rawtypes")
	BooleanJunction<BooleanJunction> junction = qb.bool();
	junction.must(qb.all().createQuery());

	if (StringUtils.hasText(request.getKeyword())) {
		Analyzer analyzer = fullTextEntityManager.getSearchFactory().getAnalyzer("synonyms");
		String[] keywordFields = {
				"loginId",
				"name.firstName", "name.lastName",
		};
		MultiFieldQueryParser parser = new MultiFieldQueryParser(keywordFields, analyzer);
		parser.setDefaultOperator(QueryParser.Operator.AND);
		Query keywordQuery;
		try {
			keywordQuery = parser.parse(request.getKeyword());
		}
		catch (ParseException e1) {
			// Retry with the keyword escaped before giving up.
			try {
				keywordQuery = parser.parse(QueryParser.escape(request.getKeyword()));
			}
			catch (ParseException e2) {
				throw new RuntimeException(e2);
			}
		}
		junction.must(keywordQuery);
	}

	if (!CollectionUtils.isEmpty(request.getRoles())) {
		// Every requested role must be present on the user.
		for (User.Role role : request.getRoles()) {
			junction.must(qb.keyword().onField("roles").matching(role).createQuery());
		}
	}

	Query searchQuery = junction.createQuery();

	Sort sort = new Sort(new SortField("sortId", SortField.Type.LONG, false));

	FullTextQuery persistenceQuery = fullTextEntityManager
			.createFullTextQuery(searchQuery, User.class)
			.setCriteriaQuery(criteria)
//			.setProjection("id")
			.setSort(sort);
	if (pageable.isPaged()) {
		persistenceQuery.setFirstResult((int) pageable.getOffset());
		persistenceQuery.setMaxResults(pageable.getPageSize());
	}
	return persistenceQuery;
}
 
源代码14 项目: wallride   文件: TagRepositoryImpl.java
/**
 * Searches tags by optional keyword (AND semantics over the name field, with an
 * escaped retry on parse failure) and optional language, sorted by sortName.
 *
 * @param request  the search criteria (keyword, language)
 * @param pageable the pagination request; unpaged requests return all results
 * @return the matching page of tags with the total result size
 */
@Override
public Page<Tag> search(TagSearchRequest request, Pageable pageable) {
	FullTextEntityManager fullTextEntityManager =  Search.getFullTextEntityManager(entityManager);
	QueryBuilder qb = fullTextEntityManager.getSearchFactory()
			.buildQueryBuilder()
			.forEntity(Tag.class)
			.get();

	@SuppressWarnings("rawtypes")
	BooleanJunction<BooleanJunction> junction = qb.bool();
	junction.must(qb.all().createQuery());

	if (StringUtils.hasText(request.getKeyword())) {
		Analyzer analyzer = fullTextEntityManager.getSearchFactory().getAnalyzer("synonyms");
		String[] fields = new String[] {
				"name"
		};
		MultiFieldQueryParser parser = new MultiFieldQueryParser(fields, analyzer);
		parser.setDefaultOperator(QueryParser.Operator.AND);
		Query query = null;
		try {
			query = parser.parse(request.getKeyword());
		}
		catch (ParseException e1) {
			// Retry with the keyword escaped before giving up.
			try {
				query = parser.parse(QueryParser.escape(request.getKeyword()));
			}
			catch (ParseException e2) {
				throw new RuntimeException(e2);
			}
		}
		junction.must(query);
	}

	if (StringUtils.hasText(request.getLanguage())) {
		junction.must(qb.keyword().onField("language").matching(request.getLanguage()).createQuery());
	}

	Query searchQuery = junction.createQuery();

	Session session = (Session) entityManager.getDelegate();
	Criteria criteria = session.createCriteria(Tag.class);

	Sort sort = new Sort(new SortField("sortName", SortField.Type.STRING));

	FullTextQuery persistenceQuery = fullTextEntityManager
			.createFullTextQuery(searchQuery, Tag.class)
			.setCriteriaQuery(criteria)
			.setSort(sort);
	// CONSISTENCY FIX: guard pagination like the sibling repository
	// implementations — an unpaged Pageable must not have offset/limit applied
	// (pageable.getOffset()/getPageSize() are invalid on unpaged requests).
	if (pageable.isPaged()) {
		persistenceQuery.setFirstResult((int) pageable.getOffset());
		persistenceQuery.setMaxResults(pageable.getPageSize());
	}

	int resultSize = persistenceQuery.getResultSize();

	@SuppressWarnings("unchecked")
	List<Tag> results = persistenceQuery.getResultList();
	return new PageImpl<>(results, pageable, resultSize);
}
 
源代码15 项目: wallride   文件: CustomFieldRepositoryImpl.java
/**
 * Assembles the full-text query for custom fields: an optional keyword over
 * name/code/description (AND semantics, with an escaped retry) plus an optional
 * language filter, sorted by the idx field.
 */
public FullTextQuery buildFullTextQuery(CustomFieldSearchRequest request, Pageable pageable, Criteria criteria) {
	FullTextEntityManager fullTextEntityManager = Search.getFullTextEntityManager(entityManager);
	QueryBuilder qb = fullTextEntityManager.getSearchFactory()
			.buildQueryBuilder()
			.forEntity(CustomField.class)
			.get();

	@SuppressWarnings("rawtypes")
	BooleanJunction<BooleanJunction> junction = qb.bool();
	junction.must(qb.all().createQuery());

	if (StringUtils.hasText(request.getKeyword())) {
		Analyzer analyzer = fullTextEntityManager.getSearchFactory().getAnalyzer("synonyms");
		String[] keywordFields = {
				"name", "code", "description"
		};
		MultiFieldQueryParser parser = new MultiFieldQueryParser(keywordFields, analyzer);
		parser.setDefaultOperator(QueryParser.Operator.AND);
		Query keywordQuery;
		try {
			keywordQuery = parser.parse(request.getKeyword());
		}
		catch (ParseException e1) {
			// Retry with the keyword escaped before giving up.
			try {
				keywordQuery = parser.parse(QueryParser.escape(request.getKeyword()));
			}
			catch (ParseException e2) {
				throw new RuntimeException(e2);
			}
		}
		junction.must(keywordQuery);
	}

	if (StringUtils.hasText(request.getLanguage())) {
		junction.must(qb.keyword().onField("language").matching(request.getLanguage()).createQuery());
	}

	Query searchQuery = junction.createQuery();

	Sort sort = new Sort(new SortField("idx", SortField.Type.INT));

	FullTextQuery persistenceQuery = fullTextEntityManager
			.createFullTextQuery(searchQuery, CustomField.class)
			.setCriteriaQuery(criteria)
			.setSort(sort);
	if (pageable.isPaged()) {
		persistenceQuery.setFirstResult((int) pageable.getOffset());
		persistenceQuery.setMaxResults(pageable.getPageSize());
	}
	return persistenceQuery;
}