Source code examples for the class org.hibernate.search.FullTextQuery

The examples below show how to use the org.hibernate.search.FullTextQuery API and typical ways of calling it; you can also follow the project links to view the original source code on GitHub.
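
Before the individual examples, here is a minimal sketch of the pattern they all share: wrap a Hibernate Session in a FullTextSession, build a Lucene query, turn it into a FullTextQuery, and then page, project, or list the results. The entity class and field name are supplied by the caller, and the Lucene 3.6 query-parser API is assumed here only to match the examples below.

import java.util.List;

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.queryParser.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;
import org.hibernate.Session;
import org.hibernate.search.FullTextQuery;
import org.hibernate.search.FullTextSession;
import org.hibernate.search.Search;

public class FullTextQuerySketch {

	/** Minimal usage sketch; entityType and defaultField are placeholders chosen by the caller. */
	public static List<?> search(Session session, Class<?> entityType,
			String defaultField, String keyword) throws ParseException {
		// Entry point of Hibernate Search: wrap the ORM session
		FullTextSession fts = Search.getFullTextSession(session);

		// Build a plain Lucene query (Lucene 3.6 API, as in the examples below)
		QueryParser parser = new QueryParser(Version.LUCENE_36, defaultField,
				new StandardAnalyzer(Version.LUCENE_36));
		Query luceneQuery = parser.parse(keyword);

		// Wrap it in a FullTextQuery restricted to the given entity type
		FullTextQuery ftq = fts.createFullTextQuery(luceneQuery, entityType);

		// Pagination works exactly as on a normal Hibernate Query
		ftq.setFirstResult(0);
		ftq.setMaxResults(10);

		return ftq.list();
	}
}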

Example 1   Project: document-management-system   File: SearchDAO.java
/**
 * Security is evaluated by Lucene, so query results are already pruned. This means that every node
 * must have its security (user and role) info stored in Lucene. This provides very quick searches,
 * but security modifications need to be applied recursively to reach every document node in the
 * repository, which may take several hours (or days) in big repositories.
 */
@SuppressWarnings("unchecked")
private NodeResultSet runQueryLucene(FullTextSession ftSession, Query query, int offset, int limit)
		throws IOException, InvalidTokenOffsetsException, HibernateException, DatabaseException {
	log.debug("runQueryLucene({}, {}, {}, {})", new Object[]{ftSession, query, offset, limit});
	List<NodeQueryResult> results = new ArrayList<NodeQueryResult>();
	NodeResultSet result = new NodeResultSet();
	FullTextQuery ftq = ftSession.createFullTextQuery(query, NodeDocument.class, NodeFolder.class, NodeMail.class);
	ftq.setProjection(FullTextQuery.SCORE, FullTextQuery.THIS);
	ftq.enableFullTextFilter("readAccess");
	QueryScorer scorer = new QueryScorer(query, NodeDocument.TEXT_FIELD);

	// Set limits
	ftq.setFirstResult(offset);
	ftq.setMaxResults(limit);

	// Highlight using a CSS style
	SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<span class='highlight'>", "</span>");
	Highlighter highlighter = new Highlighter(formatter, scorer);
	highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer, MAX_FRAGMENT_LEN));

	for (Iterator<Object[]> it = ftq.iterate(); it.hasNext(); ) {
		Object[] qRes = it.next();
		Float score = (Float) qRes[0];
		NodeBase nBase = (NodeBase) qRes[1];

		// Add result
		addResult(ftSession, results, highlighter, score, nBase);
	}

	result.setTotal(ftq.getResultSize());
	result.setResults(results);
	log.debug("runQueryLucene: {}", result);
	return result;
}
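
The "readAccess" filter enabled above must be declared on the indexed node entities. The project's actual filter implementation is not shown in this snippet; the following is only a hedged sketch of such a declaration using the standard @FullTextFilterDef / @Factory mechanism, with an assumed "userRead" field holding the per-document ACL terms.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.QueryWrapperFilter;
import org.apache.lucene.search.TermQuery;
import org.hibernate.search.annotations.Factory;
import org.hibernate.search.annotations.FullTextFilterDef;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.filter.FilterCacheModeType;

// Hypothetical sketch: declares the "readAccess" filter on an indexed entity.
@Indexed
@FullTextFilterDef(name = "readAccess", impl = ReadAccessFilterFactory.class,
		cache = FilterCacheModeType.NONE)
class NodeDocumentSketch {
	// indexed fields (including the per-document user/role ACL terms) omitted
}

class ReadAccessFilterFactory {

	private String user;

	// Would be set via ftq.enableFullTextFilter("readAccess").setParameter("user", ...)
	public void setUser(String user) {
		this.user = user;
	}

	@Factory
	public Filter getFilter() {
		// "userRead" is an assumed field name listing the users allowed to read the node
		return new QueryWrapperFilter(new TermQuery(new Term("userRead", user)));
	}
}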
 
Example 2   Project: development   File: SearchServiceBean.java
/**
 * Performs a search in Lucene and puts the resulting product object ids in
 * a corresponding map.
 * 
 * @param query
 *            the Lucene query
 * @param fts
 *            the Hibernate Search FullTextSession
 * @param map
 *            the map for the search results
 * @throws HibernateException
 */
private void searchViaLucene(org.apache.lucene.search.Query query,
        FullTextSession fts, LinkedHashMap<Long, VOService> map)
        throws HibernateException {
    FullTextQuery ftQuery = fts.createFullTextQuery(query, Product.class);
    ftQuery.setProjection("key");
    List<?> result = ftQuery.list();
    if (result != null) {
        for (Object item : result) {
            map.put((Long) ((Object[]) item)[0], null);
        }
    }
}
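
Projecting "key" as above only works if that property is stored in the Lucene index. A minimal mapping sketch of what this assumes (the class and annotations below are illustrative, not the project's actual Product entity; JPA annotations are omitted):

import org.hibernate.search.annotations.DocumentId;
import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Indexed;
import org.hibernate.search.annotations.Store;

// Hypothetical mapping sketch: only stored fields (and the document id) can be projected.
@Indexed
public class ProductSketch {

    @DocumentId                 // document ids are stored by default, so "key" is projectable
    private Long key;

    @Field(store = Store.YES)   // regular fields must be stored explicitly to be projectable
    private String name;

    // getters and setters omitted
}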
 
Example 3   Project: webdsl   File: AbstractEntitySearcher.java
public List<Float> scores( ){
    List<Float> toReturn = new ArrayList<Float>( );
    validateQuery( );
    fullTextQuery.setProjection( FullTextQuery.SCORE );
    for ( Object obj : fullTextQuery.list( ) ) {
        toReturn.add( ( Float ) ( (Object[] ) obj )[0] );
    }
    fullTextQuery.setProjection( FullTextQuery.THIS );
    return toReturn;
}
 
Example 4   Project: webdsl   File: AbstractEntitySearcher.java
public List<String> explanations( ){
    List<String> toReturn = new ArrayList<String>( );
    validateQuery( );
    fullTextQuery.setProjection( FullTextQuery.EXPLANATION );
    for ( Object obj : fullTextQuery.list( ) ) {
        toReturn.add( ( (Explanation ) ( (Object[] ) obj )[0] ).toHtml( ) );
    }
    fullTextQuery.setProjection( FullTextQuery.THIS );
    return toReturn;
}
 
Example 5   Project: maven-framework-project   File: SearchManager.java
public static void main(String[] args) throws Exception{
		ApplicationContext applicationContext=new ClassPathXmlApplicationContext("applicationContext.xml");
		SessionFactory sessionFactory = applicationContext.getBean("hibernate4sessionFactory",SessionFactory.class);
		FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
		
		// Query via the Hibernate Search API, matching multiple fields: name, description, authors.name
//		QueryBuilder qb = fullTextEntityManager.getSearchFactory().buildQueryBuilder().forEntity(Book.class ).get();
//		Query luceneQuery = qb.keyword().onFields("name","description","authors.name").matching("移动互联网").createQuery();

		// Query via the Lucene API, matching multiple fields: name, description, authors.name
		// Use the Paoding analyzer (Chinese word segmentation)
		MultiFieldQueryParser queryParser=new MultiFieldQueryParser(Version.LUCENE_36, new String[]{"name","description","authors.name"}, new PaodingAnalyzer());
		Query luceneQuery=queryParser.parse("实战");
		
		FullTextQuery fullTextQuery =fullTextSession.createFullTextQuery(luceneQuery, Book.class);
		// Page size: how many results to show per page
		fullTextQuery.setMaxResults(5);
		// Offset of the first result (current page)
		fullTextQuery.setFirstResult(0);
		
		// Highlighting setup
		SimpleHTMLFormatter formatter=new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
		QueryScorer queryScorer=new QueryScorer(luceneQuery);
		Highlighter highlighter=new Highlighter(formatter, queryScorer);

		@SuppressWarnings("unchecked")
		List<Book> resultList = fullTextQuery.list();
		System.out.println("共查找到["+resultList.size()+"]条记录");
		for (Book book : resultList) {
			String highlighterString=null;
			Analyzer analyzer=new PaodingAnalyzer();
			try {
				// Highlight name
				highlighterString=highlighter.getBestFragment(analyzer, "name", book.getName());
				if(highlighterString!=null){
					book.setName(highlighterString);
				}
				// Highlight authors.name
				Set<Author> authors = book.getAuthors();
				for (Author author : authors) {
					highlighterString=highlighter.getBestFragment(analyzer, "authors.name", author.getName());
					if(highlighterString!=null){
						author.setName(highlighterString);
					}
				}
				// Highlight description
				highlighterString=highlighter.getBestFragment(analyzer, "description", book.getDescription());
				if(highlighterString!=null){
					book.setDescription(highlighterString);
				}
			} catch (Exception e) {
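				// Highlighting failures are ignored; the original text is kept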
			}
			
			System.out.println("书名:"+book.getName()+"\n描述:"+book.getDescription()+"\n出版日期:"+book.getPublicationDate());
			System.out.println("----------------------------------------------------------");
		}
		
		fullTextSession.close();
		sessionFactory.close();
		
	}
 
Example 6   Project: maven-framework-project   File: BookDaoImpl.java
@Override
public QueryResult<Book> query(String keyword, int start, int pagesize,Analyzer analyzer,String...field) throws Exception{
	
	QueryResult<Book> queryResult=new QueryResult<Book>();
	
	List<Book> books=new ArrayList<Book>();
	
	FullTextSession fullTextSession = Search.getFullTextSession(getSession());
	
	// Query via the Hibernate Search API, matching multiple fields: name, description, authors.name
	//QueryBuilder qb = fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Book.class ).get();
	//Query luceneQuery = qb.keyword().onFields(field).matching(keyword).createQuery();

	// Query via the Lucene API, matching multiple fields: name, description, authors.name
	
	MultiFieldQueryParser queryParser=new MultiFieldQueryParser(Version.LUCENE_36,new String[]{"name","description","authors.name"}, analyzer);
	Query luceneQuery=queryParser.parse(keyword);
	
	FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(luceneQuery);
	int searchresultsize = fullTextQuery.getResultSize();
	queryResult.setSearchresultsize(searchresultsize);
	System.out.println("共查找到["+searchresultsize+"]条记录");
	
	fullTextQuery.setFirstResult(start);
	fullTextQuery.setMaxResults(pagesize);
	
	// Sort by id, descending
	fullTextQuery.setSort(new Sort(new SortField("id", SortField.INT ,true)));
	
	// Highlighting setup
	SimpleHTMLFormatter formatter=new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
	QueryScorer queryScorer=new QueryScorer(luceneQuery);
	Highlighter highlighter=new Highlighter(formatter, queryScorer);

	@SuppressWarnings("unchecked")
	List<Book> tempresult = fullTextQuery.list();
	for (Book book : tempresult) {
		String highlighterString=null;
		try {
			// Highlight name
			highlighterString=highlighter.getBestFragment(analyzer, "name", book.getName());
			if(highlighterString!=null){
				book.setName(highlighterString);
			}
			// Highlight authors.name
			Set<Author> authors = book.getAuthors();
			for (Author author : authors) {
				highlighterString=highlighter.getBestFragment(analyzer, "authors.name", author.getName());
				if(highlighterString!=null){
					author.setName(highlighterString);
				}
			}
			// Highlight description
			highlighterString=highlighter.getBestFragment(analyzer, "description", book.getDescription());
			if(highlighterString!=null){
				book.setDescription(highlighterString);
			}
		} catch (Exception e) {
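			// Highlighting failures are ignored; the original text is kept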
		}
		
		books.add(book);
		
		
		System.out.println("书名:"+book.getName()+"\n描述:"+book.getDescription()+"\n出版日期:"+book.getPublicationDate());
		System.out.println("----------------------------------------------------------");
	}
	
	queryResult.setSearchresult(books);
	
	return queryResult;
}
 
Example 7   Project: projectforge-webapp   File: BaseDao.java
@SuppressWarnings("unchecked")
private void getHistoryEntries(final Session session, final BaseSearchFilter filter, final Set<Integer> idSet, final Class< ? > clazz,
    final boolean searchStringInHistory)
{
  if (log.isDebugEnabled() == true) {
    log.debug("Searching in " + clazz);
  }
  // First get all history entries matching the filter and the given class.
  final String className = ClassUtils.getShortClassName(clazz);
  if (searchStringInHistory == true) {
    final StringBuffer buf = new StringBuffer();
    buf.append("(+className:").append(className);
    if (filter.getStartTimeOfModification() != null || filter.getStopTimeOfModification() != null) {
      final DateFormat df = new SimpleDateFormat(DateFormats.LUCENE_TIMESTAMP_MINUTE);
      df.setTimeZone(DateHelper.UTC);
      buf.append(" +timestamp:[");
      if (filter.getStartTimeOfModification() != null) {
        buf.append(df.format(filter.getStartTimeOfModification()));
      } else {
        buf.append("000000000000");
      }
      buf.append(" TO ");
      if (filter.getStopTimeOfModification() != null) {
        buf.append(df.format(filter.getStopTimeOfModification()));
      } else {
        buf.append("999999999999");
      }
      buf.append("]");
    }
    if (filter.getModifiedByUserId() != null) {
      buf.append(" +userName:").append(filter.getModifiedByUserId());
    }
    buf.append(") AND (");
    final String searchString = buf.toString() + modifySearchString(filter.getSearchString()) + ")";
    try {
      final FullTextSession fullTextSession = Search.getFullTextSession(getSession());
      final org.apache.lucene.search.Query query = createFullTextQuery(HISTORY_SEARCH_FIELDS, null, searchString);
      if (query == null) {
        // An error occurred:
        return;
      }
      final FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(query, HistoryEntry.class);
      fullTextQuery.setCacheable(true);
      fullTextQuery.setCacheRegion("historyItemCache");
      fullTextQuery.setProjection("entityId");
      final List<Object[]> result = fullTextQuery.list();
      if (result != null && result.size() > 0) {
        for (final Object[] oa : result) {
          idSet.add((Integer) oa[0]);
        }
      }
    } catch (final Exception ex) {
      final String errorMsg = "Lucene error message: "
          + ex.getMessage()
          + " (for "
          + this.getClass().getSimpleName()
          + ": "
          + searchString
          + ").";
      filter.setErrorMessage(errorMsg);
      log.info(errorMsg);
    }
  } else {
    final Criteria criteria = session.createCriteria(HistoryEntry.class);
    setCacheRegion(criteria);
    criteria.add(Restrictions.eq("className", className));
    if (filter.getStartTimeOfModification() != null && filter.getStopTimeOfModification() != null) {
      criteria.add(Restrictions.between("timestamp", filter.getStartTimeOfModification(), filter.getStopTimeOfModification()));
    } else if (filter.getStartTimeOfModification() != null) {
      criteria.add(Restrictions.ge("timestamp", filter.getStartTimeOfModification()));
    } else if (filter.getStopTimeOfModification() != null) {
      criteria.add(Restrictions.le("timestamp", filter.getStopTimeOfModification()));
    }
    if (filter.getModifiedByUserId() != null) {
      criteria.add(Restrictions.eq("userName", filter.getModifiedByUserId().toString()));
    }
    criteria.setCacheable(true);
    criteria.setCacheRegion("historyItemCache");
    criteria.setProjection(Projections.property("entityId"));
    final List<Integer> idList = criteria.list();
    if (idList != null && idList.size() > 0) {
      for (final Integer id : idList) {
        idSet.add(id);
      }
    }
  }
}
 