类org.hibernate.search.Search源码实例Demo

下面列出了如何使用 org.hibernate.search.Search API 的类实例代码及写法，也可以点击链接到 GitHub 查看源代码。

源代码1 项目: development   文件: IndexRequestMasterListener.java
/**
 * Indexes every non-null domain object of the given collection in the
 * full-text index, within a single transaction.
 *
 * Does nothing when the collection or the Hibernate session is null.
 * The transaction is rolled back if indexing fails, so a failed run does
 * not leave a dangling open transaction.
 *
 * @param list the domain objects to index; null entries are skipped.
 */
private void handleListIndexing(
        Collection<? extends DomainObject<?>> list) {
    Session session = getSession();
    if (list == null || session == null) {
        return;
    }

    FullTextSession fts = Search.getFullTextSession(session);
    Transaction tx = fts.beginTransaction();
    try {
        for (DomainObject<?> obj : list) {
            if (obj != null) {
                fts.index(obj);
            }
        }
        tx.commit();
    } catch (RuntimeException e) {
        // Roll back so the transaction is not left open on failure.
        tx.rollback();
        throw e;
    }
}
 
/**
 * Removes all Product entries from the full-text index, executed inside a
 * transaction via runTX. A missing session is silently tolerated.
 */
private void emptyProductIndex() throws Exception {
    runTX(new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            Session hibernateSession = dm.getSession();
            if (hibernateSession == null) {
                return null;
            }
            FullTextSession fts = Search.getFullTextSession(hibernateSession);
            fts.purgeAll(Product.class);
            return null;
        }
    });
}
 
/**
 * Removes all Subscription entries from the full-text index, executed inside
 * a transaction via runTX. A missing session is silently tolerated.
 */
private void emptySubscriptionIndex() throws Exception {
    runTX(new Callable<Void>() {

        @Override
        public Void call() throws Exception {
            Session hibernateSession = dm.getSession();
            if (hibernateSession == null) {
                return null;
            }
            FullTextSession fts = Search.getFullTextSession(hibernateSession);
            fts.purgeAll(Subscription.class);
            return null;
        }
    });
}
 
源代码4 项目: projectforge-webapp   文件: DatabaseDao.java
/**
 * Re-indexes all entities of the given type with Hibernate Search's MassIndexer
 * and optimizes the Lucene index afterwards.
 *
 * @param clazz the entity type to re-index.
 * @return the number of entities of that type, counted before re-indexing.
 */
private long reindexMassIndexer(final Class< ? > clazz)
{
  final Session session = getSession();
  final Criteria criteria = createCriteria(session, clazz, null, true);
  final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from).
  log.info("Starting (mass) re-indexing of " + number + " entries of type " + clazz.getName() + "...");
  final FullTextSession fullTextSession = Search.getFullTextSession(session);
  try {
    fullTextSession.createIndexer(clazz)//
    .batchSizeToLoadObjects(25) //
    .threadsToLoadObjects(5) //
    .threadsForSubsequentFetching(20) //
    .startAndWait();
  } catch (final InterruptedException ex) {
    // Restore the interrupt status so callers can still observe the interruption.
    Thread.currentThread().interrupt();
    log.error("Exception encountered while reindexing: " + ex.getMessage(), ex);
  }
  // Merge index segments for faster queries after the mass rebuild.
  final SearchFactory searchFactory = fullTextSession.getSearchFactory();
  searchFactory.optimize(clazz);
  log.info("Re-indexing of " + number + " objects of type " + clazz.getName() + " done.");
  return number;
}
 
源代码5 项目: projectforge-webapp   文件: BaseDao.java
/**
 * Persists the given object without any access check, e. g. for internal
 * updates. Runs in its own transaction and pushes the pending full-text
 * index work straight to Lucene.
 * @param obj the object to save; must not be null.
 * @return the generated identifier.
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
public Serializable internalSave(final O obj)
{
  Validate.notNull(obj);
  // Stamp creation/modification time before invoking the life-cycle callbacks.
  obj.setCreated();
  obj.setLastUpdate();
  onSave(obj);
  onSaveOrModify(obj);
  final Session currentSession = getHibernateTemplate().getSessionFactory().getCurrentSession();
  final Serializable id = currentSession.save(obj);
  log.info("New object added (" + id + "): " + obj.toString());
  prepareHibernateSearch(obj, OperationType.INSERT);
  currentSession.flush();
  // Write the queued Hibernate Search work through to the index immediately.
  Search.getFullTextSession(currentSession).flushToIndexes();
  afterSaveOrModify(obj);
  afterSave(obj);
  return id;
}
 
源代码6 项目: projectforge-webapp   文件: BaseDao.java
/**
 * Marks the given object as deleted without any access check. Only supported
 * for historizable objects; non-historizable objects must be deleted for real.
 * Runs in its own transaction and flushes the full-text index afterwards.
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
public void internalMarkAsDeleted(final O obj)
{
  if (!(obj instanceof Historizable)) {
    log.error("Object is not historizable. Therefore marking as deleted is not supported. Please use delete instead.");
    throw new InternalErrorException();
  }
  onDelete(obj);
  // Lock the database copy to prevent concurrent modification while flagging it.
  final O persistentObj = getHibernateTemplate().load(clazz, obj.getId(), LockMode.PESSIMISTIC_WRITE);
  onSaveOrModify(obj);
  copyValues(obj, persistentObj, "deleted"); // If user has made additional changes.
  persistentObj.setDeleted(true);
  persistentObj.setLastUpdate();
  final Session currentSession = getHibernateTemplate().getSessionFactory().getCurrentSession();
  currentSession.flush();
  // Write the queued Hibernate Search work through to the index immediately.
  Search.getFullTextSession(currentSession).flushToIndexes();
  afterSaveOrModify(obj);
  afterDelete(obj);
  getSession().flush();
  log.info("Object marked as deleted: " + persistentObj.toString());
}
 
源代码7 项目: projectforge-webapp   文件: BaseDao.java
/**
 * Resets the deleted flag of the given object without any access check.
 * Runs in its own transaction and flushes the full-text index afterwards.
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRES_NEW, isolation = Isolation.REPEATABLE_READ)
public void internalUndelete(final O obj)
{
  // Lock the persistent copy while resetting the deleted flag.
  final O persistentObj = getHibernateTemplate().load(clazz, obj.getId(), LockMode.PESSIMISTIC_WRITE);
  onSaveOrModify(obj);
  copyValues(obj, persistentObj, "deleted"); // If user has made additional changes.
  persistentObj.setDeleted(false);
  obj.setDeleted(false);
  persistentObj.setLastUpdate();
  obj.setLastUpdate(persistentObj.getLastUpdate());
  log.info("Object undeleted: " + persistentObj.toString());
  final Session currentSession = getHibernateTemplate().getSessionFactory().getCurrentSession();
  currentSession.flush();
  // Write the queued Hibernate Search work through to the index immediately.
  Search.getFullTextSession(currentSession).flushToIndexes();
  afterSaveOrModify(obj);
  afterUndelete(obj);
}
 
/**
 * Do real indexes optimization.
 *
 * Opens a dedicated Hibernate session, asks the Hibernate Search factory to
 * optimize all Lucene indexes and closes everything again. Guarded by the
 * optimizeIndexesRunning flag so only one optimization runs at a time; the
 * flag and the sessions are always released in the finally block.
 */
public static void optimizeIndexes() throws Exception {
	FullTextSession ftSession = null;
	Session session = null;

	if (optimizeIndexesRunning) {
		log.warn("*** Optimize indexes already running ***");
	} else {
		optimizeIndexesRunning = true;
		log.debug("*** Begin optimize indexes ***");

		try {
			session = HibernateUtil.getSessionFactory().openSession();
			ftSession = Search.getFullTextSession(session);

			// Optimize indexes
			SearchFactory searchFactory = ftSession.getSearchFactory();
			searchFactory.optimize();
		} finally {
			// Always release the guard flag and the sessions, even on failure.
			// (The former catch block only re-threw the exception, so it was removed.)
			optimizeIndexesRunning = false;
			HibernateUtil.close(ftSession);
			HibernateUtil.close(session);
		}

		log.debug("*** End optimize indexes ***");
	}
}
 
源代码9 项目: development   文件: IndexRequestMasterListener.java
/**
 * Indexes a single object in the full-text index within its own transaction.
 *
 * Does nothing when the parameter or the Hibernate session is null. The
 * transaction is rolled back if indexing fails, so a failed run does not
 * leave a dangling open transaction.
 *
 * @param parameter the object to index.
 */
private void handleObjectIndexing(Object parameter) {
    Session session = getSession();
    if (parameter == null || session == null) {
        return;
    }

    FullTextSession fts = Search.getFullTextSession(session);
    Transaction tx = fts.beginTransaction();
    try {
        fts.index(parameter);
        tx.commit();
    } catch (RuntimeException e) {
        // Roll back so the transaction is not left open on failure.
        tx.rollback();
        throw e;
    }
}
 
源代码10 项目: wallride   文件: SystemService.java
/**
 * Rebuilds the full-text index for every indexed entity type, asynchronously.
 *
 * Iterates the indexed types, scrolls through each table with a forward-only
 * cursor (to avoid loading everything into memory) and indexes the entities in
 * batches of BATCH_SIZE, flushing and clearing the session between batches.
 */
@Async
@Transactional(propagation = Propagation.SUPPORTS)
public void reIndex() throws Exception {
	logger.info("Re-Index started");

	FullTextSession fullTextSession = Search.getFullTextSession((entityManager.unwrap(Session.class)));

	// Manual flushing and no cache interaction while mass indexing.
	fullTextSession.setFlushMode(FlushMode.MANUAL);
	fullTextSession.setCacheMode(CacheMode.IGNORE);

	for (Class<?> persistentClass : fullTextSession.getSearchFactory().getIndexedTypes()) {
		Transaction transaction = fullTextSession.beginTransaction();

		// Scrollable results will avoid loading too many objects in memory
		ScrollableResults results = fullTextSession.createCriteria(persistentClass)
				.setFetchSize(BATCH_SIZE)
				.scroll(ScrollMode.FORWARD_ONLY);
		try {
			int index = 0;
			while (results.next()) {
				index++;
				fullTextSession.index(results.get(0)); // index each element
				if (index % BATCH_SIZE == 0) {
					fullTextSession.flushToIndexes(); // apply changes to indexes
					fullTextSession.clear(); // free memory since the queue is processed
				}
			}
			// Flush the last partial batch before committing.
			fullTextSession.flushToIndexes();
		} finally {
			// Release the underlying database cursor.
			results.close();
		}
		transaction.commit();
	}
	logger.info("Re-Index finished");
}
 
源代码11 项目: maven-framework-project   文件: IndexManger.java
/**
 * Rebuilds the whole full-text index once this bean has been configured.
 * Blocks until the mass indexer has finished.
 */
@Override
public void afterPropertiesSet() throws Exception {
	// Rebuild the index from scratch.
	Session session = sessionFactory.openSession();
	try {
		FullTextSession fullTextSession = Search.getFullTextSession(session);
		fullTextSession.createIndexer().startAndWait();
	} finally {
		// openSession() creates a fresh session that must be closed explicitly
		// to avoid leaking a database connection.
		session.close();
	}
}
 
/**
 * Re-indexes the given object in the full-text index and then processes all
 * dependent entries registered for its class (looked up in the class-to-Entry
 * map and handled by the recursive overload).
 *
 * Objects whose re-index id is already contained in alreadyReindexed are
 * skipped, which also protects against infinite recursion between mutually
 * dependent objects. Indexing failures are logged and swallowed deliberately
 * (best-effort re-indexing must not abort the whole run).
 *
 * @param hibernateTemplate passed through to the recursive overload.
 * @param session Hibernate session; its pending changes are flushed first.
 * @param obj the object to re-index (its fresh database state is indexed).
 * @param alreadyReindexed re-index ids already processed; updated in place.
 */
private void reindexDependents(final HibernateTemplate hibernateTemplate, final Session session, final BaseDO< ? > obj,
    final Set<String> alreadyReindexed)
{
  if (alreadyReindexed.contains(getReindexId(obj)) == true) {
    if (log.isDebugEnabled() == true) {
      log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
    }
    return;
  }
  session.flush(); // Needed to flush the object changes!
  final FullTextSession fullTextSession = Search.getFullTextSession(session);
  fullTextSession.setFlushMode(FlushMode.AUTO);
  fullTextSession.setCacheMode(CacheMode.IGNORE);
  try {
    // Index the current database state, not the possibly stale passed-in instance.
    BaseDO< ? > dbObj = (BaseDO< ? >) session.get(obj.getClass(), obj.getId());
    if (dbObj == null) {
      dbObj = (BaseDO< ? >) session.load(obj.getClass(), obj.getId());
    }
    fullTextSession.index(dbObj);
    alreadyReindexed.add(getReindexId(dbObj));
    if (log.isDebugEnabled() == true) {
      log.debug("Object added to index: " + getReindexId(dbObj));
    }
  } catch (final Exception ex) {
    // Don't fail if any exception while re-indexing occurs.
    log.info("Fail to re-index " + obj.getClass() + ": " + ex.getMessage());
  }
  // session.flush(); // clear every batchSize since the queue is processed
  final List<Entry> entryList = map.get(obj.getClass());
  reindexDependents(hibernateTemplate, session, obj, entryList, alreadyReindexed);
}
 
/**
 * MassIndexer implementation.
 *
 * Rebuilds all Lucene indexes with Hibernate Search's MassIndexer while the
 * system is switched into maintenance / read-only mode, streaming progress to
 * the HTTP response as HTML. The maintenance flags are reset and the Hibernate
 * sessions are closed in the finally block even if the rebuild fails.
 */
@SuppressWarnings("rawtypes")
private void luceneIndexesMassIndexer(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
	log.debug("luceneIndexesMassIndexer({}, {})", request, response);
	PrintWriter out = response.getWriter();
	response.setContentType(MimeTypeConfig.MIME_HTML);
	header(out, "Rebuild Lucene indexes", breadcrumb);
	out.flush();

	FullTextSession ftSession = null;
	Session session = null;

	// Activity log
	UserActivity.log(request.getRemoteUser(), "ADMIN_FORCE_REBUILD_INDEXES", null, null, null);

	try {
		// Block concurrent writes while the indexes are being rebuilt.
		Config.SYSTEM_MAINTENANCE = true;
		Config.SYSTEM_READONLY = true;
		out.println("<ul>");
		out.println("<li>System into maintenance mode</li>");
		FileLogger.info(BASE_NAME, "BEGIN - Rebuild Lucene indexes");

		session = HibernateUtil.getSessionFactory().openSession();
		ftSession = Search.getFullTextSession(session);
		long total = 0;

		// Calculate number of entities (only used to size the progress monitor)
		for (Class cls : classes) {
			String nodeType = cls.getSimpleName();
			out.println("<li>Calculate " + nodeType + "</li>");
			out.flush();
			long partial = NodeBaseDAO.getInstance().getCount(nodeType);
			FileLogger.info(BASE_NAME, "Number of {0}: {1}", nodeType, partial);
			out.println("<li>Number of " + nodeType + ": " + partial + "</li>");
			out.flush();
			total += partial;
		}

		// Rebuild indexes
		out.println("<li>Rebuilding indexes</li>");
		out.flush();
		ProgressMonitor monitor = new ProgressMonitor(out, "NodeBase", (int) total);
		ftSession.createIndexer()
				.batchSizeToLoadObjects(Config.HIBERNATE_INDEXER_BATCH_SIZE_LOAD_OBJECTS)
				.threadsForSubsequentFetching(Config.HIBERNATE_INDEXER_THREADS_SUBSEQUENT_FETCHING)
				.threadsToLoadObjects(Config.HIBERNATE_INDEXER_THREADS_LOAD_OBJECTS)
				.threadsForIndexWriter(Config.HIBERNATE_INDEXER_THREADS_INDEX_WRITER)
				.cacheMode(CacheMode.NORMAL) // defaults to CacheMode.IGNORE
				.progressMonitor(monitor).startAndWait();

		Config.SYSTEM_READONLY = false;
		Config.SYSTEM_MAINTENANCE = false;
		out.println("<li>System out of maintenance mode</li>");
		out.flush();

		// Finalized
		out.println("<li>Index rebuilding completed!</li>");
		out.println("</ul>");
		out.flush();
	} catch (Exception e) {
		FileLogger.error(BASE_NAME, StackTraceUtils.toString(e));
		out.println("<div class=\"warn\">Exception: " + e.getMessage() + "</div>");
		out.flush();
	} finally {
		// Always leave maintenance mode and release the Hibernate sessions.
		Config.SYSTEM_READONLY = false;
		Config.SYSTEM_MAINTENANCE = false;
		HibernateUtil.close(ftSession);
		HibernateUtil.close(session);
	}

	// Finalized
	FileLogger.info(BASE_NAME, "END - Rebuild Lucene indexes");

	// End page
	footer(out);
	out.flush();
	out.close();

	log.debug("luceneIndexesMassIndexer: void");
}
 
源代码14 项目: document-management-system   文件: SearchDAO.java
/**
 * Get Lucene document terms.
 *
 * Locates the Lucene document whose "uuid" field matches the given node UUID
 * and collects every index term of its "text" field.
 *
 * @param entityType indexed entity class used to build the keyword query.
 * @param nodeUuid UUID identifying the node/document to inspect.
 * @return the "text" field terms of the matching document; empty if no match.
 */
@SuppressWarnings("unchecked")
public List<String> getTerms(Class<?> entityType, String nodeUuid) throws CorruptIndexException, IOException {
	List<String> terms = new ArrayList<String>();
	FullTextSession ftSession = null;
	IndexSearcher searcher = null;
	ReaderProvider provider = null;
	Session session = null;
	IndexReader reader = null;

	try {
		session = HibernateUtil.getSessionFactory().openSession();
		ftSession = Search.getFullTextSession(session);
		SearchFactory sFactory = ftSession.getSearchFactory();
		provider = sFactory.getReaderProvider();
		QueryBuilder builder = sFactory.buildQueryBuilder().forEntity(entityType).get();
		Query query = builder.keyword().onField("uuid").matching(nodeUuid).createQuery();

		// NOTE(review): only the first NodeDocument directory provider is opened —
		// this assumes a single, non-sharded index; confirm against configuration.
		DirectoryProvider<Directory>[] dirProv = sFactory.getDirectoryProviders(NodeDocument.class);
		reader = provider.openReader(dirProv[0]);
		searcher = new IndexSearcher(reader);
		TopDocs topDocs = searcher.search(query, 1);

		for (ScoreDoc sDoc : topDocs.scoreDocs) {
			if (!reader.isDeleted(sDoc.doc)) {
				// Walk all index terms, keeping those of the "text" field that
				// actually occur in the matched document.
				for (TermEnum te = reader.terms(); te.next(); ) {
					Term t = te.term();

					if ("text".equals(t.field())) {
						for (TermDocs tds = reader.termDocs(t); tds.next(); ) {
							if (sDoc.doc == tds.doc()) {
								terms.add(t.text());
								//log.info("Field: {} - {}", t.field(), t.text());
							}
						}
					}
				}
			}
		}
	} finally {
		// Return the shared reader to its provider before closing the sessions.
		if (provider != null && reader != null) {
			provider.closeReader(reader);
		}

		if (searcher != null) {
			searcher.close();
		}
		HibernateUtil.close(ftSession);
		HibernateUtil.close(session);
	}

	return terms;
}
 
源代码15 项目: development   文件: IndexRequestMasterListenerIT.java
/**
 * Asserts the Lucene index contents for the given entity class: the document
 * count, the presence of all expected indexed attributes (plus the implicit
 * "key" and "_hibernate_class" fields) and the exact number of indexed fields.
 *
 * @param clazz the indexed entity class whose index reader is opened.
 * @param comment message prefix for the document-count assertion.
 * @param expectedNumDocs expected number of documents; when 0 the attribute
 *            checks are skipped (the index may not exist yet).
 * @param expectedNumIndexedAttributes expected attribute count, excluding the
 *            two implicit fields ("key" and "_hibernate_class").
 * @param expectedAttributes attribute names that must exist in the index.
 */
private void assertDocsInIndex(final Class<?> clazz, final String comment,
        final int expectedNumDocs, final int expectedNumIndexedAttributes,
        final List<String> expectedAttributes) throws Exception {
    Boolean evaluationTookPlace = runTX(new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            boolean evaluatedIndex = false;
            Session session = dm.getSession();
            if (session != null) {
                FullTextSession fullTextSession = Search
                        .getFullTextSession(session);
                SearchFactory searchFactory = fullTextSession
                        .getSearchFactory();
                IndexReader reader = searchFactory.getIndexReaderAccessor()
                        .open(clazz);

                try {
                    assertEquals(comment, expectedNumDocs,
                            reader.numDocs());
                    if (expectedNumDocs > 0) {
                        final FieldInfos indexedFieldNames = MultiFields
                                .getMergedFieldInfos(reader);
                        for (String expectedAttr : expectedAttributes) {
                            assertNotNull(
                                    "attribute " + expectedAttr
                                            + " does not exist in index: "
                                            + indexedFieldNames,
                                    indexedFieldNames
                                            .fieldInfo(expectedAttr));
                        }
                        assertNotNull(
                                "attribute \"key\" does not exist in index: "
                                        + indexedFieldNames,
                                indexedFieldNames.fieldInfo("key"));
                        assertNotNull(
                                "attribute \"_hibernate_class\" does not exist in index: "
                                        + indexedFieldNames,
                                indexedFieldNames
                                        .fieldInfo("_hibernate_class"));
                        assertEquals(
                                "More or less attributes indexed than expected, attributes retrieved from index: "
                                        + indexedFieldNames,
                                expectedNumIndexedAttributes + 2,
                                indexedFieldNames.size());
                        evaluatedIndex = true;
                    }
                } finally {
                    // Always return the reader to the accessor.
                    searchFactory.getIndexReaderAccessor().close(reader);
                }
            }

            return Boolean.valueOf(evaluatedIndex);
        }
    });

    if (expectedNumDocs > 0) {
        Assert.assertTrue("Index not found, no evaluation took place",
                evaluationTookPlace.booleanValue());
    }
}
 
/**
 * Populates the slave node's half of the demo data: looks up the 5 shared
 * Device entities (already created by the master node) and persists 6 of the
 * 12 App entities, each with its supported devices and one customer review.
 * The repeated query/persist boilerplate is factored into private helpers.
 */
public void contextInitialized(ServletContextEvent event) {

	event.getServletContext().setAttribute("mode", "slave");

	// For demonstration purposes, we will have about half the data created by the master node... and the other half created by
	// the slave node.  After a few seconds, both nodes will refresh their local copies of the index using the overall master... and
	// all of the App entities will be searchable from either node.
	FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
	fullTextSession.beginTransaction();

	//
	// Get references to the 5 devices, which should have already been populated in the database by the master node
	//
	Device xPhone = findDeviceByName(fullTextSession, "xPhone");
	Device xTablet = findDeviceByName(fullTextSession, "xTablet");
	Device solarSystem = findDeviceByName(fullTextSession, "Solar System Phone");
	Device flame = findDeviceByName(fullTextSession, "Flame Book Reader");
	Device pc = findDeviceByName(fullTextSession, "Personal Computer");

	//
	// Create and persist the remaining 6 of 12 apps with devices and customer reviews
	//
	persistApp(fullTextSession,
			new App(
					"Frustrated Flamingos",
					"flamingo.jpg",
					"A fun little game app, where you throw large birds around for no apparent reason.  Why else do you think they're so frustrated?",
					"Games",
					0.99f),
			new Device[] { xPhone, xTablet, solarSystem, flame, pc },
			new CustomerReview("BirdSlinger", 4, "LOL, I love catapulting the flamingos into the cows!  I hate how the advertisement banner hides part of the view, tho."));

	persistApp(fullTextSession,
			new App(
					"Grype Video Conferencing",
					"laptop.jpg",
					"Make free local and international calls, with video, using this app and your home Internet connection.  Better yet, make free calls using your employer's Internet connection!",
					"Internet",
					3.99f),
			new Device[] { xPhone, xTablet, solarSystem, pc },
			new CustomerReview("office.casual", 4, "I wish they had not added video to this app in the latest version.  I liked it much more back when I didn't have to get dressed."));

	persistApp(fullTextSession,
			new App(
					"E-Book Reader",
					"book.jpg",
					"Read books on your computer, or on the go from your mobile device with this powerful e-reader app.  We recommend \"Hibernate Search by Example\", from Packt Publishing.",
					"Media",
					1.99f),
			new Device[] { xPhone, xTablet, solarSystem, flame, pc },
			new CustomerReview("StevePerkins", 5, "This 'Hibernate Search by Example' book is brilliant!  Thanks for the recommendation!"));

	persistApp(fullTextSession,
			new App(
					"Dome Web Browser",
					"orangeswirls.jpg",
					"This amazing app allows us to track all of your online activity.  We can figure out where you live, what you had for breakfast this morning, or what your closest secrets are.  The app also includes a web browser.",
					"Internet",
					0),
			new Device[] { solarSystem, flame, pc },
			new CustomerReview("TinFoilHat", 1, "I uninstalled this app.  If the government would fake a moon landing, then they would definately use my browser history to come after me."));

	persistApp(fullTextSession,
			new App(
					"Athena Internet Radio",
					"jamming.jpg",
					"Listen to your favorite songs on streaming Internet radio!  When you like a song, this app will play more songs similar to that one.  Or at least it plays more songs... to be honest, sometimes they're not all that similar.  :(",
					"Media",
					3.99f),
			new Device[] { xPhone, xTablet, solarSystem, flame, pc },
			new CustomerReview("lskinner", 5, "I requested 'Free Bird', and this app played 'Free Bird'.  What's not to like?"));

	persistApp(fullTextSession,
			new App(
					"Map Journey",
					"compass.jpg",
					"Do you need directions to help you reach a destination?  This GPS app will definitely produce enough turn-by-turn directions to get you there!  Eventually.",
					"Travel",
					0.99f),
			new Device[] { xPhone, solarSystem, pc },
			new CustomerReview("LostInSpace", 3, "Not great... but still WAY better than Orange maps."));

	//
	// Close and cleanup the Hibernate session
	//
	fullTextSession.getTransaction().commit();
	fullTextSession.close();

}

/** Loads the Device entity with the given unique name. */
private Device findDeviceByName(FullTextSession fullTextSession, String name) {
	return (Device) fullTextSession.createQuery( "from Device as device where device.name = ?" )
			.setString(0, name).uniqueResult();
}

/** Attaches the supported devices and the single review to the app, saves it and logs the persist. */
private void persistApp(FullTextSession fullTextSession, App app, Device[] devices, CustomerReview review) {
	app.setSupportedDevices( new HashSet<Device>(Arrays.asList(devices)) );
	app.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { review })) );
	fullTextSession.save(app);
	logger.info("Persisting " + app.getName());
}
 
源代码17 项目: maven-framework-project   文件: SearchServlet.java
/**
 * Primary search entry point for this servlet, invoked once per HTTP POST to
 * the mapped URL. Runs a full-text keyword search over App names and
 * descriptions as well as associated device names and embedded customer-review
 * comments, then forwards the matches to the JSP view.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

	Logger log = LoggerFactory.getLogger(SearchServlet.class);

	// Keyword(s) the user typed into the search form
	String keywords = request.getParameter("searchString");
	log.info("Received searchString [" + keywords + "]");

	// Open a plain Hibernate session and wrap it with Hibernate Search
	Session hibernateSession = StartupDataLoader.openSession();
	FullTextSession searchSession = Search.getFullTextSession(hibernateSession);

	// Not strictly required for reads, but wrapping the work in a transaction is good practice
	searchSession.beginTransaction();

	// Build a Lucene keyword query against App and its related/embedded fields
	QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity( App.class ).get();
	org.apache.lucene.search.Query keywordQuery = builder
		.keyword()
		.onFields("name", "description", "supportedDevices.name", "customerReviews.comments")
		.matching(keywords)
		.createQuery();

	// Wrap the Lucene query so Hibernate can execute it and hydrate App entities
	org.hibernate.Query searchQuery = searchSession.createFullTextQuery(keywordQuery, App.class);
	List<App> matches = searchQuery.list();
	log.info("Found " + matches.size() + " apps");

	// Detach the results so the view layer cannot trigger lazy loading of
	// associated devices or embedded customer reviews
	searchSession.clear();

	// Expose the results to the view layer
	request.setAttribute("apps", matches);

	// Commit and release the Hibernate session
	searchSession.getTransaction().commit();
	hibernateSession.close();

	// Render the results via the JSP/JSTL view
	getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
 
源代码18 项目: maven-framework-project   文件: SearchServlet.java
/**
 * Primary search entry point for this servlet, invoked once per HTTP POST to
 * the mapped URL. Runs a full-text keyword search over App names and
 * descriptions and forwards the matches to the JSP view.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

	Logger log = LoggerFactory.getLogger(SearchServlet.class);

	// Keyword(s) the user typed into the search form
	String keywords = request.getParameter("searchString");
	log.info("Received searchString [" + keywords + "]");

	// Open a plain Hibernate session and wrap it with Hibernate Search
	Session hibernateSession = StartupDataLoader.openSession();
	FullTextSession searchSession = Search.getFullTextSession(hibernateSession);

	// Not strictly required for reads, but wrapping the work in a transaction is good practice
	searchSession.beginTransaction();

	// Build a Lucene keyword query matching the user's keywords against the
	// "name" and "description" fields of App
	QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity( App.class ).get();
	org.apache.lucene.search.Query keywordQuery = builder
		.keyword()
		.onFields("name", "description")
		.matching(keywords)
		.createQuery();

	// Wrap the Lucene query so Hibernate can execute it and hydrate App entities
	org.hibernate.Query searchQuery = searchSession.createFullTextQuery(keywordQuery, App.class);
	List<App> matches = searchQuery.list();
	log.info("Found " + matches.size() + " search results");

	// Expose the results to the view layer
	request.setAttribute("apps", matches);

	// Commit and release the Hibernate session
	searchSession.getTransaction().commit();
	hibernateSession.close();

	// Render the results via the JSP/JSTL view
	getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
 
源代码19 项目: maven-framework-project   文件: SearchServlet.java
/**
 * Primary search entry point for this servlet, invoked once per HTTP POST to
 * the mapped URL. Runs a full-text keyword search over App names and
 * descriptions as well as associated device names and embedded customer-review
 * comments, then forwards the matches to the JSP view.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

	Logger log = LoggerFactory.getLogger(SearchServlet.class);

	// Keyword(s) the user typed into the search form
	String keywords = request.getParameter("searchString");
	log.info("Received searchString [" + keywords + "]");

	// Open a plain Hibernate session and wrap it with Hibernate Search
	Session hibernateSession = StartupDataLoader.openSession();
	FullTextSession searchSession = Search.getFullTextSession(hibernateSession);

	// Not strictly required for reads, but wrapping the work in a transaction is good practice
	searchSession.beginTransaction();

	// Build a Lucene keyword query against App and its related/embedded fields
	QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity( App.class ).get();
	org.apache.lucene.search.Query keywordQuery = builder
		.keyword()
		.onFields("name", "description", "supportedDevices.name", "customerReviews.comments")
		.matching(keywords)
		.createQuery();

	// Wrap the Lucene query so Hibernate can execute it and hydrate App entities
	org.hibernate.Query searchQuery = searchSession.createFullTextQuery(keywordQuery, App.class);
	List<App> matches = searchQuery.list();
	log.info("Found " + matches.size() + " apps");

	// Detach the results so the view layer cannot trigger lazy loading of
	// associated devices or embedded customer reviews
	searchSession.clear();

	// Expose the results to the view layer
	request.setAttribute("apps", matches);

	// Commit and release the Hibernate session
	searchSession.getTransaction().commit();
	hibernateSession.close();

	// Render the results via the JSP/JSTL view
	getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
 
源代码20 项目: maven-framework-project   文件: SearchServlet.java
/**
 * Primary search entry point for this servlet, invoked once per HTTP POST to
 * the mapped URL. Runs a full-text keyword search over App names and
 * descriptions and forwards the matches to the JSP view.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

	Logger log = LoggerFactory.getLogger(SearchServlet.class);

	// Keyword(s) the user typed into the search form
	String keywords = request.getParameter("searchString");
	log.info("Received searchString [" + keywords + "]");

	// Open a plain Hibernate session and wrap it with Hibernate Search
	Session hibernateSession = StartupDataLoader.openSession();
	FullTextSession searchSession = Search.getFullTextSession(hibernateSession);

	// Not strictly required for reads, but wrapping the work in a transaction is good practice
	searchSession.beginTransaction();

	// Build a Lucene keyword query matching the user's keywords against the
	// "name" and "description" fields of App
	QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity( App.class ).get();
	org.apache.lucene.search.Query keywordQuery = builder
		.keyword()
		.onFields("name", "description")
		.matching(keywords)
		.createQuery();

	// Wrap the Lucene query so Hibernate can execute it and hydrate App entities
	org.hibernate.Query searchQuery = searchSession.createFullTextQuery(keywordQuery, App.class);
	List<App> matches = searchQuery.list();
	log.info("Found " + matches.size() + " search results");

	// Expose the results to the view layer
	request.setAttribute("apps", matches);

	// Commit and release the Hibernate session
	searchSession.getTransaction().commit();
	hibernateSession.close();

	// Render the results via the JSP/JSTL view
	getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
 
源代码21 项目: maven-framework-project   文件: SearchServlet.java
/**
 * Primary search entry point for this servlet; invoked automatically once for
 * every HTTP POST to the mapped URL.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {

	Logger logger = LoggerFactory.getLogger(SearchServlet.class);

	// Pull the user's search keyword(s) out of the request parameters
	String searchString = request.getParameter("searchString");
	logger.info("Received searchString [" + searchString + "]");

	// Open a plain Hibernate session and wrap it with full-text search support
	Session session = StartupDataLoader.openSession();
	FullTextSession fullTextSession = Search.getFullTextSession(session);

	// Run inside a transaction — not strictly required for reads, but good practice
	fullTextSession.beginTransaction();

	// Build a keyword query matching the search terms against App's own "name" and
	// "description" fields, the "name" of associated Device entities, and the
	// "comments" of embedded CustomerReview objects.
	QueryBuilder queryBuilder = fullTextSession.getSearchFactory().buildQueryBuilder().forEntity( App.class ).get();
	org.apache.lucene.search.Query luceneQuery = queryBuilder
			.keyword()
			.onFields("name", "description", "supportedDevices.name", "customerReviews.comments")
			.matching(searchString)
			.createQuery();

	org.hibernate.Query hibernateQuery = fullTextSession.createFullTextQuery(luceneQuery, App.class);
	List<App> apps = hibernateQuery.list();
	logger.info("Found " + apps.size() + " apps");

	// Detach the results so the view layer cannot trigger lazy loading of the
	// associated devices or embedded customer reviews through this session
	fullTextSession.clear();

	// Make the search results available to the view layer
	request.setAttribute("apps", apps);

	// Commit and release the Hibernate session
	fullTextSession.getTransaction().commit();
	session.close();

	// Forward the request (including the search results) to the JSP/JSTL view
	getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
 
源代码22 项目: maven-framework-project   文件: SearchManager.java
public static void main(String[] args) throws Exception{
		// Bootstrap Spring and obtain the Hibernate SessionFactory, then wrap a new
		// session with full-text search support.
		ApplicationContext applicationContext=new ClassPathXmlApplicationContext("applicationContext.xml");
		SessionFactory sessionFactory = applicationContext.getBean("hibernate4sessionFactory",SessionFactory.class);
		FullTextSession fullTextSession = Search.getFullTextSession(sessionFactory.openSession());
		
		// Querying multiple fields (name, description, authors.name) via the Hibernate Search API:
//		QueryBuilder qb = fullTextEntityManager.getSearchFactory().buildQueryBuilder().forEntity(Book.class ).get();
//		Query luceneQuery = qb.keyword().onFields("name","description","authors.name").matching("移动互联网").createQuery();
		
		// Query the same fields via the raw Lucene API, tokenizing with the Paoding analyzer.
		MultiFieldQueryParser queryParser=new MultiFieldQueryParser(Version.LUCENE_36, new String[]{"name","description","authors.name"}, new PaodingAnalyzer());
		Query luceneQuery=queryParser.parse("实战");
		
		FullTextQuery fullTextQuery =fullTextSession.createFullTextQuery(luceneQuery, Book.class);
		// Page size
		fullTextQuery.setMaxResults(5);
		// Offset of the first result (current page)
		fullTextQuery.setFirstResult(0);
		
		// Highlighting setup.
		// BUG FIX: the closing tag was "<font/>" (a malformed self-closing tag); it must be
		// "</font>" — consistent with the correct formatter used in BookDaoImpl.query().
		SimpleHTMLFormatter formatter=new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
		QueryScorer queryScorer=new QueryScorer(luceneQuery);
		Highlighter highlighter=new Highlighter(formatter, queryScorer);

		@SuppressWarnings("unchecked")
		List<Book> resultList = fullTextQuery.list();
		System.out.println("共查找到["+resultList.size()+"]条记录");
		for (Book book : resultList) {
			String highlighterString=null;
			Analyzer analyzer=new PaodingAnalyzer();
			try {
				// Highlight the book name
				highlighterString=highlighter.getBestFragment(analyzer, "name", book.getName());
				if(highlighterString!=null){
					book.setName(highlighterString);
				}
				// Highlight each author's name
				Set<Author> authors = book.getAuthors();
				for (Author author : authors) {
					highlighterString=highlighter.getBestFragment(analyzer, "authors.name", author.getName());
					if(highlighterString!=null){
						author.setName(highlighterString);
					}
				}
				// Highlight the description
				highlighterString=highlighter.getBestFragment(analyzer, "description", book.getDescription());
				if(highlighterString!=null){
					book.setDescription(highlighterString);
				}
			} catch (Exception ignored) {
				// Best effort: a field that fails to highlight is simply left unmodified.
			}
			
			System.out.println("书名:"+book.getName()+"\n描述:"+book.getDescription()+"\n出版日期:"+book.getPublicationDate());
			System.out.println("----------------------------------------------------------");
		}
		
		fullTextSession.close();
		sessionFactory.close();
		
	}
 
源代码23 项目: maven-framework-project   文件: BookDaoImpl.java
/**
 * Full-text search for books with pagination, id-descending sort, and HTML highlighting
 * of the matched terms in the name, authors.name and description fields.
 *
 * @param keyword  the user's search string (parsed by the given analyzer)
 * @param start    index of the first result (pagination offset)
 * @param pagesize maximum number of results to return
 * @param analyzer the Lucene analyzer used for both query parsing and highlighting
 * @param field    the index fields to search; falls back to
 *                 {@code name, description, authors.name} when none are given
 * @return a QueryResult holding the total hit count and the highlighted page of books
 * @throws Exception if the query string cannot be parsed
 */
@Override
public QueryResult<Book> query(String keyword, int start, int pagesize,Analyzer analyzer,String...field) throws Exception{
	
	QueryResult<Book> queryResult=new QueryResult<Book>();
	
	List<Book> books=new ArrayList<Book>();
	
	FullTextSession fullTextSession = Search.getFullTextSession(getSession());
	
	// Equivalent Hibernate Search API variant:
	//QueryBuilder qb = fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(Book.class ).get();
	//Query luceneQuery = qb.keyword().onFields(field).matching(keyword).createQuery();

	// BUG FIX: the 'field' varargs parameter used to be ignored and the field list was
	// hard-coded. Honor the caller's fields, keeping the old list as the default.
	String[] searchFields = (field != null && field.length > 0)
			? field
			: new String[]{"name","description","authors.name"};
	MultiFieldQueryParser queryParser=new MultiFieldQueryParser(Version.LUCENE_36, searchFields, analyzer);
	Query luceneQuery=queryParser.parse(keyword);
	
	FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(luceneQuery);
	int searchresultsize = fullTextQuery.getResultSize();
	queryResult.setSearchresultsize(searchresultsize);
	System.out.println("共查找到["+searchresultsize+"]条记录");
	
	// Pagination window
	fullTextQuery.setFirstResult(start);
	fullTextQuery.setMaxResults(pagesize);
	
	// Sort by id, descending
	fullTextQuery.setSort(new Sort(new SortField("id", SortField.INT ,true)));
	
	// Highlighting setup: wrap matched terms in bold red markup
	SimpleHTMLFormatter formatter=new SimpleHTMLFormatter("<b><font color='red'>", "</font></b>");
	QueryScorer queryScorer=new QueryScorer(luceneQuery);
	Highlighter highlighter=new Highlighter(formatter, queryScorer);

	@SuppressWarnings("unchecked")
	List<Book> tempresult = fullTextQuery.list();
	for (Book book : tempresult) {
		String highlighterString=null;
		try {
			// Highlight the book name
			highlighterString=highlighter.getBestFragment(analyzer, "name", book.getName());
			if(highlighterString!=null){
				book.setName(highlighterString);
			}
			// Highlight each author's name
			Set<Author> authors = book.getAuthors();
			for (Author author : authors) {
				highlighterString=highlighter.getBestFragment(analyzer, "authors.name", author.getName());
				if(highlighterString!=null){
					author.setName(highlighterString);
				}
			}
			// Highlight the description
			highlighterString=highlighter.getBestFragment(analyzer, "description", book.getDescription());
			if(highlighterString!=null){
				book.setDescription(highlighterString);
			}
		} catch (Exception ignored) {
			// Best effort: a field that fails to highlight is simply left unmodified.
		}
		
		books.add(book);
		
		
		System.out.println("书名:"+book.getName()+"\n描述:"+book.getDescription()+"\n出版日期:"+book.getPublicationDate());
		System.out.println("----------------------------------------------------------");
	}
	
	queryResult.setSearchresult(books);
	
	return queryResult;
}
 
源代码24 项目: projectforge-webapp   文件: BaseDao.java
/**
 * This method is for internal use e. g. for updating objects without check access.<br/>
 * Please note: update ignores the field deleted. Use markAsDeleted, delete and undelete methods instead.
 * @param obj The (possibly detached) object carrying the new values; its id selects the db object to update.
 * @param checkAccess If false, any access check will be ignored.
 * @return The ModificationStatus: NONE if no modification was detected, otherwise the kind of change applied.
 */
@Transactional(readOnly = false, propagation = Propagation.REQUIRED, isolation = Isolation.REPEATABLE_READ)
public ModificationStatus internalUpdate(final O obj, final boolean checkAccess)
{
  onSaveOrModify(obj);
  if (checkAccess == true) {
    accessChecker.checkRestrictedOrDemoUser();
  }
  // Load the persistent counterpart with a pessimistic write lock to serialize concurrent updates.
  final O dbObj = getHibernateTemplate().load(clazz, obj.getId(), LockMode.PESSIMISTIC_WRITE);
  if (checkAccess == true) {
    checkLoggedInUserUpdateAccess(obj, dbObj);
  }
  onChange(obj, dbObj);
  final O dbObjBackup;
  if (supportAfterUpdate == true) {
    // Snapshot the unmodified db state so the afterUpdate hooks can compare old vs. new values.
    dbObjBackup = getBackupObject(dbObj);
  } else {
    dbObjBackup = null;
  }
  // Decide before copying whether dependent objects will need re-indexing (compares obj with the old db state).
  final boolean wantsReindexAllDependentObjects = wantsReindexAllDependentObjects(obj, dbObj);
  // Copy all values of modified user to database object, ignore field 'deleted'.
  final ModificationStatus result = copyValues(obj, dbObj, "deleted");
  if (result != ModificationStatus.NONE) {
    dbObj.setLastUpdate();
    log.info("Object updated: " + dbObj.toString());
  } else {
    log.info("No modifications detected (no update needed): " + dbObj.toString());
  }
  prepareHibernateSearch(obj, OperationType.UPDATE);
  // Flush the change to the database and push it to the Hibernate Search (Lucene) index immediately.
  final Session session = getHibernateTemplate().getSessionFactory().getCurrentSession();
  session.flush();
  Search.getFullTextSession(session).flushToIndexes();
  afterSaveOrModify(obj);
  // NOTE(review): both afterUpdate overloads are invoked in each branch — presumably the
  // two-argument variant is an older hook kept for subclass compatibility; confirm before changing.
  if (supportAfterUpdate == true) {
    afterUpdate(obj, dbObjBackup, result != ModificationStatus.NONE);
    afterUpdate(obj, dbObjBackup);
  } else {
    afterUpdate(obj, null, result != ModificationStatus.NONE);
    afterUpdate(obj, null);
  }
  if (wantsReindexAllDependentObjects == true) {
    reindexDependentObjects(obj);
  }
  return result;
}
 
源代码25 项目: projectforge-webapp   文件: BaseDao.java
/**
 * Collects the entity ids of all history entries for the given class that match the filter
 * and adds them to idSet. Two strategies: a Lucene full-text query when the search string
 * should also be matched against the history content, otherwise a plain Hibernate Criteria
 * query on the HistoryEntry table.
 * @param session Hibernate session used for the Criteria path.
 * @param filter Supplies time range, modifying user and search string; receives an error message on Lucene failure.
 * @param idSet Receives the matching entityId values (output parameter).
 * @param clazz The entity class whose history entries are searched.
 * @param searchStringInHistory If true, use the full-text index; otherwise use a Criteria query.
 */
@SuppressWarnings("unchecked")
private void getHistoryEntries(final Session session, final BaseSearchFilter filter, final Set<Integer> idSet, final Class< ? > clazz,
    final boolean searchStringInHistory)
{
  if (log.isDebugEnabled() == true) {
    log.debug("Searching in " + clazz);
  }
  // First get all history entries matching the filter and the given class.
  final String className = ClassUtils.getShortClassName(clazz);
  if (searchStringInHistory == true) {
    // Build a Lucene query string: (+className:X [+timestamp:[from TO to]] [+userName:id]) AND (<user search>)
    final StringBuffer buf = new StringBuffer();
    buf.append("(+className:").append(className);
    if (filter.getStartTimeOfModification() != null || filter.getStopTimeOfModification() != null) {
      final DateFormat df = new SimpleDateFormat(DateFormats.LUCENE_TIMESTAMP_MINUTE);
      df.setTimeZone(DateHelper.UTC);
      buf.append(" +timestamp:[");
      if (filter.getStartTimeOfModification() != null) {
        buf.append(df.format(filter.getStartTimeOfModification()));
      } else {
        // Open lower bound: minimum lexicographic timestamp value.
        buf.append("000000000000");
      }
      buf.append(" TO ");
      if (filter.getStopTimeOfModification() != null) {
        buf.append(df.format(filter.getStopTimeOfModification()));
      } else {
        // Open upper bound: maximum lexicographic timestamp value.
        buf.append("999999999999");
      }
      buf.append("]");
    }
    if (filter.getModifiedByUserId() != null) {
      buf.append(" +userName:").append(filter.getModifiedByUserId());
    }
    buf.append(") AND (");
    final String searchString = buf.toString() + modifySearchString(filter.getSearchString()) + ")";
    try {
      final FullTextSession fullTextSession = Search.getFullTextSession(getSession());
      final org.apache.lucene.search.Query query = createFullTextQuery(HISTORY_SEARCH_FIELDS, null, searchString);
      if (query == null) {
        // An error occurred:
        return;
      }
      final FullTextQuery fullTextQuery = fullTextSession.createFullTextQuery(query, HistoryEntry.class);
      fullTextQuery.setCacheable(true);
      fullTextQuery.setCacheRegion("historyItemCache");
      // Only the entityId column is needed, so project instead of loading full entities.
      fullTextQuery.setProjection("entityId");
      final List<Object[]> result = fullTextQuery.list();
      if (result != null && result.size() > 0) {
        for (final Object[] oa : result) {
          idSet.add((Integer) oa[0]);
        }
      }
    } catch (final Exception ex) {
      // A malformed user search string is expected here; report it on the filter instead of failing.
      final String errorMsg = "Lucene error message: "
          + ex.getMessage()
          + " (for "
          + this.getClass().getSimpleName()
          + ": "
          + searchString
          + ").";
      filter.setErrorMessage(errorMsg);
      log.info(errorMsg);
    }
  } else {
    // No full-text matching required: filter HistoryEntry rows directly via Criteria.
    final Criteria criteria = session.createCriteria(HistoryEntry.class);
    setCacheRegion(criteria);
    criteria.add(Restrictions.eq("className", className));
    if (filter.getStartTimeOfModification() != null && filter.getStopTimeOfModification() != null) {
      criteria.add(Restrictions.between("timestamp", filter.getStartTimeOfModification(), filter.getStopTimeOfModification()));
    } else if (filter.getStartTimeOfModification() != null) {
      criteria.add(Restrictions.ge("timestamp", filter.getStartTimeOfModification()));
    } else if (filter.getStopTimeOfModification() != null) {
      criteria.add(Restrictions.le("timestamp", filter.getStopTimeOfModification()));
    }
    if (filter.getModifiedByUserId() != null) {
      criteria.add(Restrictions.eq("userName", filter.getModifiedByUserId().toString()));
    }
    criteria.setCacheable(true);
    criteria.setCacheRegion("historyItemCache");
    // Project on entityId only.
    criteria.setProjection(Projections.property("entityId"));
    final List<Integer> idList = criteria.list();
    if (idList != null && idList.size() > 0) {
      for (final Integer id : idList) {
        idSet.add(id);
      }
    }
  }
}
 
 类所在包
 同包方法