下面列出了怎么用org.hibernate.search.FullTextSession的API类实例代码及写法,或者点击链接到github查看源代码。
/**
 * Adds every non-null domain object in the given collection to the
 * full-text index within a single transaction.
 *
 * @param list the objects to index; the call is a no-op when the list or
 *             the current session is {@code null}
 */
private void handleListIndexing(
        Collection<? extends DomainObject<?>> list) {
    Session session = getSession();
    if (list == null || session == null) {
        return;
    }
    FullTextSession fts = Search.getFullTextSession(session);
    Transaction tx = fts.beginTransaction();
    try {
        for (DomainObject<?> obj : list) {
            if (obj != null) {
                fts.index(obj);
            }
        }
        tx.commit();
    } catch (RuntimeException e) {
        // Roll back so a failed indexing run does not leave the
        // transaction dangling on the session.
        tx.rollback();
        throw e;
    }
}
/** Removes every Product document from the full-text index, inside a transaction. */
private void emptyProductIndex() throws Exception {
    runTX(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            final Session hibernateSession = dm.getSession();
            if (hibernateSession == null) {
                return null;
            }
            Search.getFullTextSession(hibernateSession).purgeAll(Product.class);
            return null;
        }
    });
}
/** Removes every Subscription document from the full-text index, inside a transaction. */
private void emptySubscriptionIndex() throws Exception {
    runTX(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            final Session hibernateSession = dm.getSession();
            if (hibernateSession == null) {
                return null;
            }
            Search.getFullTextSession(hibernateSession).purgeAll(Subscription.class);
            return null;
        }
    });
}
/**
 * Re-indexes all persistent entries of the given type using the Hibernate
 * Search mass indexer and optimizes the index afterwards.
 *
 * @param clazz the indexed entity type to rebuild
 * @return the number of entities counted before the rebuild started
 */
private long reindexMassIndexer(final Class< ? > clazz)
{
    final Session session = getSession();
    final Criteria criteria = createCriteria(session, clazz, null, true);
    // Get number of objects to re-index (select count(*) from ...).
    final Long number = (Long) criteria.uniqueResult();
    log.info("Starting (mass) re-indexing of " + number + " entries of type " + clazz.getName() + "...");
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    try {
        fullTextSession.createIndexer(clazz)//
            .batchSizeToLoadObjects(25) //
            //.cacheMode(CacheMode.NORMAL) //
            .threadsToLoadObjects(5) //
            //.threadsForIndexWriter(1) //
            .threadsForSubsequentFetching(20) //
            .startAndWait();
    } catch (final InterruptedException ex) {
        log.error("Exception encountered while reindexing: " + ex.getMessage(), ex);
        // Restore the interrupt flag so callers can still observe the interruption.
        Thread.currentThread().interrupt();
    }
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize(clazz);
    log.info("Re-indexing of " + number + " objects of type " + clazz.getName() + " done.");
    return number;
}
/**
 * Do real indexes optimization.
 *
 * Only one optimization run may be active at a time; concurrent calls are
 * skipped with a warning. The sessions are always closed and the running
 * flag is always reset, even when optimization fails.
 */
public static void optimizeIndexes() throws Exception {
    FullTextSession ftSession = null;
    Session session = null;
    if (optimizeIndexesRunning) {
        log.warn("*** Optimize indexes already running ***");
    } else {
        optimizeIndexesRunning = true;
        log.debug("*** Begin optimize indexes ***");
        try {
            session = HibernateUtil.getSessionFactory().openSession();
            ftSession = Search.getFullTextSession(session);
            // Optimize all indexes known to the search factory.
            SearchFactory searchFactory = ftSession.getSearchFactory();
            searchFactory.optimize();
        } finally {
            // The useless `catch (Exception e) { throw e; }` was removed;
            // exceptions still propagate unchanged to the caller.
            optimizeIndexesRunning = false;
            HibernateUtil.close(ftSession);
            HibernateUtil.close(session);
        }
        log.debug("*** End optimize indexes ***");
    }
}
/**
 * Runs a Lucene full-text query over document, folder and mail nodes and returns a paged
 * result set with highlighted excerpts.
 *
 * Security is evaluated by Lucene, so query results are already pruned. This means that every
 * node should have its security (user and role) info stored in Lucene. This provides very quick
 * search, but security modifications need to be recursively applied to reach every document node
 * in the repository. This may take several hours (or days) in big repositories.
 *
 * @param ftSession the Hibernate Search session used to build and run the query
 * @param query the Lucene query to execute
 * @param offset index of the first result to return (paging)
 * @param limit maximum number of results to return (paging)
 */
@SuppressWarnings("unchecked")
private NodeResultSet runQueryLucene(FullTextSession ftSession, Query query, int offset, int limit)
throws IOException, InvalidTokenOffsetsException, HibernateException, DatabaseException {
log.debug("runQueryLucene({}, {}, {}, {})", new Object[]{ftSession, query, offset, limit});
List<NodeQueryResult> results = new ArrayList<NodeQueryResult>();
NodeResultSet result = new NodeResultSet();
// Restrict hits to the three searchable node types.
FullTextQuery ftq = ftSession.createFullTextQuery(query, NodeDocument.class, NodeFolder.class, NodeMail.class);
// Project the Lucene score together with the managed entity itself.
ftq.setProjection(FullTextQuery.SCORE, FullTextQuery.THIS);
// Security pruning happens inside this full-text filter.
ftq.enableFullTextFilter("readAccess");
QueryScorer scorer = new QueryScorer(query, NodeDocument.TEXT_FIELD);
// Set limits
ftq.setFirstResult(offset);
ftq.setMaxResults(limit);
// Highlight using a CSS style
SimpleHTMLFormatter formatter = new SimpleHTMLFormatter("<span class='highlight'>", "</span>");
Highlighter highlighter = new Highlighter(formatter, scorer);
highlighter.setTextFragmenter(new SimpleSpanFragmenter(scorer, MAX_FRAGMENT_LEN));
// Each row is an Object[]{score, entity} because of the projection above.
for (Iterator<Object[]> it = ftq.iterate(); it.hasNext(); ) {
Object[] qRes = it.next();
Float score = (Float) qRes[0];
NodeBase nBase = (NodeBase) qRes[1];
// Add result
addResult(ftSession, results, highlighter, score, nBase);
}
// Total hit count ignores the offset/limit paging window.
result.setTotal(ftq.getResultSize());
result.setResults(results);
log.debug("runQueryLucene: {}", result);
return result;
}
/**
 * Get Lucene index reader.
 */
@SuppressWarnings("rawtypes")
private IndexReader getReader(FullTextSession session, Class entity) {
    SearchFactory factory = session.getSearchFactory();
    // Use the first directory provider configured for the entity's index.
    DirectoryProvider firstProvider = factory.getDirectoryProviders(entity)[0];
    return factory.getReaderProvider().openReader(firstProvider);
}
/**
 * Check if this uuid represents a mail node.
 * <p>
 * Used in SearchDAO; callers exposing this through other methods should perform a
 * security check.
 */
public boolean isMail(FullTextSession ftSession, String uuid) throws HibernateException {
    log.debug("isMail({}, {})", ftSession, uuid);
    final Object node = ftSession.get(NodeMail.class, uuid);
    final boolean isMailNode = node instanceof NodeMail;
    log.debug("isMail: {}", isMailNode);
    return isMailNode;
}
/**
 * Indexes a single object in the full-text index within its own transaction.
 *
 * @param parameter the object to index; no-op when it or the session is {@code null}
 */
private void handleObjectIndexing(Object parameter) {
    Session session = getSession();
    if (parameter == null || session == null) {
        return;
    }
    FullTextSession fts = Search.getFullTextSession(session);
    Transaction tx = fts.beginTransaction();
    try {
        fts.index(parameter);
        tx.commit();
    } catch (RuntimeException e) {
        // Roll back so a failed indexing attempt does not leave the transaction open.
        tx.rollback();
        throw e;
    }
}
/**
 * Performs a search in Lucene and puts the resulting product object ids in
 * a corresponding map.
 *
 * @param query the Lucene query
 * @param fts the Hibernate Search FullTextSession
 * @param map the map for the search results; keys are the projected "key"
 *            values, values are left {@code null}
 * @throws HibernateException on Hibernate failures
 */
private void searchViaLucene(org.apache.lucene.search.Query query,
        FullTextSession fts, LinkedHashMap<Long, VOService> map)
        throws HibernateException {
    FullTextQuery projectionQuery = fts.createFullTextQuery(query, Product.class);
    // Only the "key" property is needed, so project instead of loading entities.
    projectionQuery.setProjection("key");
    List<?> rows = projectionQuery.list();
    if (rows == null) {
        return;
    }
    for (Object row : rows) {
        // Each projected row is an Object[] whose first element is the key.
        Long key = (Long) ((Object[]) row)[0];
        map.put(key, null);
    }
}
/**
 * Rebuilds the full-text index for a single entity class, purging the old
 * index first.
 *
 * @param c the indexed entity class to rebuild
 * @return {@code true} when the rebuild completed, {@code false} on any error
 */
protected static boolean reindexEntityClass(Class<?> c) {
    // Simple class name relative to its package.
    String entityName = c.getName().substring(
            c.getPackage().getName().length() + 1);
    log("---Reindexing: " + entityName + "---");
    long time = System.currentTimeMillis();
    org.hibernate.search.FullTextSession ftSession = getFullTextSession();
    try {
        ftSession
                .createIndexer(c)
                .progressMonitor(
                        new org.webdsl.search.IndexProgressMonitor(2000,
                                entityName)).batchSizeToLoadObjects(15)
                .threadsToLoadObjects(1).threadsForSubsequentFetching(2)
                .threadsForIndexWriter(1).purgeAllOnStart(true)
                .startAndWait();
    } catch (InterruptedException ex) {
        // Restore the interrupt flag before reporting the failure.
        Thread.currentThread().interrupt();
        org.webdsl.logging.Logger.error(
                "Error during reindexing of entity: " + entityName, ex);
        return false;
    } catch (Exception ex) {
        org.webdsl.logging.Logger.error(
                "Error during reindexing of entity: " + entityName, ex);
        return false;
    } finally {
        if (ftSession != null) {
            ftSession.close();
            ftSession = null;
        }
    }
    time = System.currentTimeMillis() - time;
    log("---Done in " + time + "ms.---");
    return true;
}
/** Clears all search indexes when the database mode is "create-drop". */
public static void tryDropIndex() {
    if (!"create-drop".equals(utils.BuildProperties.getDbMode())) {
        return;
    }
    log("Db-mode is set to create-drop -> Clearing search indexes");
    FullTextSession searchSession = getFullTextSession();
    searchSession.purgeAll(Object.class);
    searchSession.getSearchFactory().optimize();
    searchSession.flushToIndexes();
    log("Clearing search indexes successful");
}
/**
 * Lazily wraps the current Hibernate session in a FullTextSession, caching the
 * result and flagging that the full-text query must be refreshed.
 */
protected FullTextSession getFullTextSession() {
    if (fullTextSession != null) {
        return fullTextSession;
    }
    fullTextSession = org.hibernate.search.Search.getFullTextSession(HibernateUtil.getCurrentSession());
    updateFullTextQuery = true;
    return fullTextSession;
}
/**
 * Parses a textual range definition into a Hibernate Search range faceting request
 * for the given field and entity.
 *
 * @param field the indexed field the facet applies to
 * @param rangeAsString textual ranges, matched against the pre-compiled pattern {@code p}
 * @param entityClass the indexed entity the facet is defined on
 * @param type target type the textual range bounds are converted to
 * @param fts session used to look up the entity's document builder
 * @return a range facet request covering every range found in {@code rangeAsString}
 */
public static <T> FacetingRequest toFacetingRequest(String field, String rangeAsString, Class<?> entityClass, Class<T> type, FullTextSession fts){
List<FacetRange<T>> facetRangeList = new ArrayList<FacetRange<T>>();
DocumentBuilderIndexedEntity<?> documentBuilder = ContextHelper.getSearchFactory(fts).getDocumentBuilderIndexedEntity(entityClass);
Matcher matcher = p.matcher( rangeAsString );
FacetRange<T> range;
T min, max;
Class<?> targetClass;
boolean includeMin, includeMax;
while(matcher.find()){
// Groups 1 and 4 hold the bracket characters; '[' / ']' include the bound.
includeMin = matcher.group(1).equals("[");
// An empty bound text means the range is open-ended on that side.
min = ( matcher.group( 2 ).isEmpty() ) ? null : (T) stringToTypedObject( matcher.group( 2 ).trim(), type );
max = ( matcher.group( 3 ).isEmpty() ) ? null : (T) stringToTypedObject( matcher.group( 3 ).trim(), type );
includeMax = matcher.group(4).equals("]");
// NOTE(review): throws NullPointerException when BOTH bounds are empty — confirm inputs always have at least one bound.
targetClass = (min != null) ? min.getClass() : max.getClass();
range = new FacetRange<T>( targetClass, min, max, includeMin, includeMax, field, documentBuilder );
facetRangeList.add(range);
}
FacetingRequestImpl rfr = new RangeFacetRequest<T>( facetName(field), field, facetRangeList, documentBuilder );
// RANGE_DEFINITION_ODER is the (misspelled) constant name in the Hibernate Search API.
rfr.setSort( FacetSortOrder.RANGE_DEFINITION_ODER );
rfr.setIncludeZeroCounts( false );
rfr.setMaxNumberOfFacets( facetRangeList.size() );
return rfr;
}
/**
 * Rebuilds the full-text index for every indexed entity type, one transaction
 * per type, batching flushes to keep memory bounded.
 *
 * @throws Exception if indexing or the underlying session fails
 */
@Async
@Transactional(propagation = Propagation.SUPPORTS)
public void reIndex() throws Exception {
    logger.info("Re-Index started");
    FullTextSession fullTextSession = Search.getFullTextSession((entityManager.unwrap(Session.class)));
    // Manual flushing + no second-level cache: the standard batch-indexing setup.
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    for (Class<?> persistentClass : fullTextSession.getSearchFactory().getIndexedTypes()) {
        Transaction transaction = fullTextSession.beginTransaction();
        // Scrollable results will avoid loading too many objects in memory
        ScrollableResults results = fullTextSession.createCriteria(persistentClass)
                .setFetchSize(BATCH_SIZE)
                .scroll(ScrollMode.FORWARD_ONLY);
        try {
            int index = 0;
            while (results.next()) {
                index++;
                fullTextSession.index(results.get(0)); // index each element
                if (index % BATCH_SIZE == 0) {
                    fullTextSession.flushToIndexes(); // apply changes to indexes
                    fullTextSession.clear(); // free memory since the queue is processed
                }
            }
        } finally {
            // Always release the cursor's underlying resources.
            results.close();
        }
        transaction.commit();
    }
    logger.info("Re-Index finished");
}
@Override
public void afterPropertiesSet() throws Exception {
    // Rebuild the full-text index on startup.
    Session session = sessionFactory.openSession();
    try {
        FullTextSession fullTextSession = Search.getFullTextSession(session);
        fullTextSession.createIndexer().startAndWait();
    } finally {
        // The session was opened here, so it must also be closed here.
        session.close();
    }
}
/**
 * Re-indexes the given object (unless already done) and then recursively re-indexes
 * its registered dependent objects.
 *
 * The object is re-loaded from the database first so the index reflects the persisted
 * state. Indexing failures are logged and swallowed so one broken object does not
 * abort the whole re-index run.
 *
 * @param hibernateTemplate passed through to the dependent re-indexing step
 * @param session session the pending changes are flushed on and the object re-loaded from
 * @param obj the object to re-index
 * @param alreadyReindexed ids already processed in this run (duplicate/cycle guard)
 */
private void reindexDependents(final HibernateTemplate hibernateTemplate, final Session session, final BaseDO< ? > obj,
final Set<String> alreadyReindexed)
{
if (alreadyReindexed.contains(getReindexId(obj)) == true) {
if (log.isDebugEnabled() == true) {
log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
}
return;
}
session.flush(); // Needed to flush the object changes!
final FullTextSession fullTextSession = Search.getFullTextSession(session);
fullTextSession.setFlushMode(FlushMode.AUTO);
fullTextSession.setCacheMode(CacheMode.IGNORE);
try {
// Prefer the current database state; fall back to a proxy when get() finds nothing.
BaseDO< ? > dbObj = (BaseDO< ? >) session.get(obj.getClass(), obj.getId());
if (dbObj == null) {
dbObj = (BaseDO< ? >) session.load(obj.getClass(), obj.getId());
}
fullTextSession.index(dbObj);
alreadyReindexed.add(getReindexId(dbObj));
if (log.isDebugEnabled() == true) {
log.debug("Object added to index: " + getReindexId(dbObj));
}
} catch (final Exception ex) {
// Don't fail if any exception while re-indexing occurs.
log.info("Fail to re-index " + obj.getClass() + ": " + ex.getMessage());
}
// session.flush(); // clear every batchSize since the queue is processed
// Recurse into the dependent entries registered for this object's type.
final List<Entry> entryList = map.get(obj.getClass());
reindexDependents(hibernateTemplate, session, obj, entryList, alreadyReindexed);
}
/** Builds a native (non-JPA) Hibernate Search full-text query over Dog entities. */
private org.hibernate.search.FullTextQuery createNonJpaFullTextQuery(Query query) {
    Session hibernateSession = entityManager.unwrap(Session.class);
    return org.hibernate.search.Search.getFullTextSession(hibernateSession)
            .createFullTextQuery(query, Dog.class);
}
/**
 * MassIndexer implementation.
 *
 * Puts the system into maintenance/read-only mode, counts the entities to be indexed,
 * rebuilds the Lucene indexes through the Hibernate Search MassIndexer while streaming
 * an HTML progress report to the response, and always restores normal mode and closes
 * the sessions afterwards.
 *
 * @param request the servlet request (used for the activity log)
 * @param response the servlet response the HTML progress report is written to
 */
@SuppressWarnings("rawtypes")
private void luceneIndexesMassIndexer(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
log.debug("luceneIndexesMassIndexer({}, {})", request, response);
PrintWriter out = response.getWriter();
response.setContentType(MimeTypeConfig.MIME_HTML);
header(out, "Rebuild Lucene indexes", breadcrumb);
out.flush();
FullTextSession ftSession = null;
Session session = null;
// Activity log
UserActivity.log(request.getRemoteUser(), "ADMIN_FORCE_REBUILD_INDEXES", null, null, null);
try {
// Block concurrent modifications while the indexes are being rebuilt.
Config.SYSTEM_MAINTENANCE = true;
Config.SYSTEM_READONLY = true;
out.println("<ul>");
out.println("<li>System into maintenance mode</li>");
FileLogger.info(BASE_NAME, "BEGIN - Rebuild Lucene indexes");
session = HibernateUtil.getSessionFactory().openSession();
ftSession = Search.getFullTextSession(session);
long total = 0;
// Calculate number of entities
for (Class cls : classes) {
String nodeType = cls.getSimpleName();
out.println("<li>Calculate " + nodeType + "</li>");
out.flush();
long partial = NodeBaseDAO.getInstance().getCount(nodeType);
FileLogger.info(BASE_NAME, "Number of {0}: {1}", nodeType, partial);
out.println("<li>Number of " + nodeType + ": " + partial + "</li>");
out.flush();
total += partial;
}
// Rebuild indexes
out.println("<li>Rebuilding indexes</li>");
out.flush();
// The monitor reports progress against the pre-computed entity total.
ProgressMonitor monitor = new ProgressMonitor(out, "NodeBase", (int) total);
ftSession.createIndexer()
.batchSizeToLoadObjects(Config.HIBERNATE_INDEXER_BATCH_SIZE_LOAD_OBJECTS)
.threadsForSubsequentFetching(Config.HIBERNATE_INDEXER_THREADS_SUBSEQUENT_FETCHING)
.threadsToLoadObjects(Config.HIBERNATE_INDEXER_THREADS_LOAD_OBJECTS)
.threadsForIndexWriter(Config.HIBERNATE_INDEXER_THREADS_INDEX_WRITER)
.cacheMode(CacheMode.NORMAL) // defaults to CacheMode.IGNORE
.progressMonitor(monitor).startAndWait();
Config.SYSTEM_READONLY = false;
Config.SYSTEM_MAINTENANCE = false;
out.println("<li>System out of maintenance mode</li>");
out.flush();
// Finalized
out.println("<li>Index rebuilding completed!</li>");
out.println("</ul>");
out.flush();
} catch (Exception e) {
FileLogger.error(BASE_NAME, StackTraceUtils.toString(e));
out.println("<div class=\"warn\">Exception: " + e.getMessage() + "</div>");
out.flush();
} finally {
// Always leave maintenance mode and release the sessions, even on failure.
Config.SYSTEM_READONLY = false;
Config.SYSTEM_MAINTENANCE = false;
HibernateUtil.close(ftSession);
HibernateUtil.close(session);
}
// Finalized
FileLogger.info(BASE_NAME, "END - Rebuild Lucene indexes");
// End page
footer(out);
out.flush();
out.close();
log.debug("luceneIndexesMassIndexer: void");
}
/**
 * Rebuilds the full-text index for every NodeDocumentVersion row.
 *
 * Purges the existing index, scrolls through all rows re-indexing them in batches of
 * 300 (flushing and clearing the session per batch), and optimizes the index both
 * before and after the rebuild.
 *
 * @return the number of objects that were re-indexed
 */
protected int doRebuildIndex() throws Exception {
FullTextSession fullTextSession = (FullTextSession) entityManager.getDelegate();
// Manual flushing + no second-level cache: the standard batch-indexing setup.
fullTextSession.setFlushMode(org.hibernate.FlushMode.MANUAL);
fullTextSession.setCacheMode(org.hibernate.CacheMode.IGNORE);
fullTextSession.purgeAll(NodeDocumentVersion.class);
fullTextSession.getSearchFactory().optimize(NodeDocumentVersion.class);
String query = "select ndv from NodeDocumentVersion ndv";
ScrollableResults cursor = fullTextSession.createQuery(query).scroll();
// Jump to the last row to learn the total count (row numbers are 0-based).
cursor.last();
int count = cursor.getRowNumber() + 1;
// NOTE(review): message says "Wine index" but this indexes NodeDocumentVersion — confirm wording.
log.warn("Re-building Wine index for " + count + " objects.");
if (count > 0) {
int batchSize = 300;
cursor.first(); // Reset to first result row
int i = 0;
while (true) {
fullTextSession.index(cursor.get(0));
if (++i % batchSize == 0) {
fullTextSession.flushToIndexes();
fullTextSession.clear(); // Clear persistence context for each batch
log.info("Flushed index update " + i + " from Thread "
+ Thread.currentThread().getName());
}
if (cursor.isLast()) {
break;
}
cursor.next();
}
}
cursor.close();
// Flush whatever remains in the final (partial) batch.
fullTextSession.flushToIndexes();
fullTextSession.clear(); // Clear persistence context for each batch
fullTextSession.getSearchFactory().optimize(NodeDocumentVersion.class);
return count;
}
/**
 * Add result
 *
 * Wraps a raw Lucene hit (entity + score) in a NodeQueryResult: classifies the node as
 * document, attachment (a document whose parent is a mail node), folder or mail, builds
 * a highlighted excerpt from the document/mail text, initializes the entity and appends
 * it to the result list. Nodes of unknown type are logged and dropped.
 */
private void addResult(FullTextSession ftSession, List<NodeQueryResult> results, Highlighter highlighter, Float score,
NodeBase nBase) throws IOException, InvalidTokenOffsetsException, DatabaseException {
NodeQueryResult qr = new NodeQueryResult();
NodeDocument nDocument = null;
NodeMail nMail = null;
String excerpt = null;
if (nBase instanceof NodeDocument) {
nDocument = (NodeDocument) nBase;
// A document whose parent is a mail node is treated as a mail attachment.
if (NodeMailDAO.getInstance().itemExists(nDocument.getParent())) {
log.debug("NODE DOCUMENT - ATTACHMENT");
qr.setAttachment(nDocument);
} else {
log.debug("NODE DOCUMENT");
qr.setDocument(nDocument);
}
} else if (nBase instanceof NodeFolder) {
log.debug("NODE FOLDER");
NodeFolder nFld = (NodeFolder) nBase;
qr.setFolder(nFld);
} else if (nBase instanceof NodeMail) {
log.debug("NODE MAIL");
nMail = (NodeMail) nBase;
qr.setMail(nMail);
} else {
log.warn("NODE UNKNOWN");
}
// Build the highlighted excerpt from the node's extracted text, when available.
if (nDocument != null && nDocument.getText() != null) {
excerpt = highlighter.getBestFragment(analyzer, NodeDocument.TEXT_FIELD, nDocument.getText());
} else if (nMail != null && nMail.getContent() != null) {
excerpt = highlighter.getBestFragment(analyzer, NodeMail.CONTENT_FIELD, nMail.getContent());
}
log.debug("Result: SCORE({}), EXCERPT({}), DOCUMENT({})", new Object[]{score, excerpt, nBase});
qr.setScore(score);
qr.setExcerpt(FormatUtil.stripNonValidXMLCharacters(excerpt));
// At most one of these is set; initialize the entity before exposing it to callers.
if (qr.getDocument() != null) {
NodeDocumentDAO.getInstance().initialize(qr.getDocument(), false);
results.add(qr);
} else if (qr.getFolder() != null) {
NodeFolderDAO.getInstance().initialize(qr.getFolder());
results.add(qr);
} else if (qr.getMail() != null) {
NodeMailDAO.getInstance().initialize(qr.getMail(), false);
results.add(qr);
} else if (qr.getAttachment() != null) {
NodeDocumentDAO.getInstance().initialize(qr.getAttachment(), false);
results.add(qr);
}
}
/**
 * Get Lucent document terms.
 *
 * Looks up the Lucene document whose "uuid" field matches the given node and collects
 * every term of its "text" field. Uses the low-level IndexReader/TermEnum API, so the
 * reader, searcher and sessions are all released in the finally block.
 *
 * @param entityType indexed entity used to build the uuid lookup query
 * @param nodeUuid uuid of the node whose terms are requested
 * @return the terms stored in the "text" field of the matching document
 */
@SuppressWarnings("unchecked")
public List<String> getTerms(Class<?> entityType, String nodeUuid) throws CorruptIndexException, IOException {
List<String> terms = new ArrayList<String>();
FullTextSession ftSession = null;
IndexSearcher searcher = null;
ReaderProvider provider = null;
Session session = null;
IndexReader reader = null;
try {
session = HibernateUtil.getSessionFactory().openSession();
ftSession = Search.getFullTextSession(session);
SearchFactory sFactory = ftSession.getSearchFactory();
provider = sFactory.getReaderProvider();
QueryBuilder builder = sFactory.buildQueryBuilder().forEntity(entityType).get();
Query query = builder.keyword().onField("uuid").matching(nodeUuid).createQuery();
// NOTE(review): the reader is opened on NodeDocument's directory regardless of entityType — confirm intended.
DirectoryProvider<Directory>[] dirProv = sFactory.getDirectoryProviders(NodeDocument.class);
reader = provider.openReader(dirProv[0]);
searcher = new IndexSearcher(reader);
// Only the single best match for the uuid is needed.
TopDocs topDocs = searcher.search(query, 1);
for (ScoreDoc sDoc : topDocs.scoreDocs) {
if (!reader.isDeleted(sDoc.doc)) {
// Walk every term in the index and keep those of the "text" field
// that occur in the matched document.
for (TermEnum te = reader.terms(); te.next(); ) {
Term t = te.term();
if ("text".equals(t.field())) {
for (TermDocs tds = reader.termDocs(t); tds.next(); ) {
if (sDoc.doc == tds.doc()) {
terms.add(t.text());
//log.info("Field: {} - {}", t.field(), t.text());
}
}
}
}
}
}
} finally {
if (provider != null && reader != null) {
provider.closeReader(reader);
}
if (searcher != null) {
searcher.close();
}
HibernateUtil.close(ftSession);
HibernateUtil.close(session);
}
return terms;
}
/**
 * Asserts the state of the Lucene index for the given entity class inside a transaction.
 *
 * Verifies the number of indexed documents and, when documents are expected, that every
 * expected attribute plus the implicit "key" and "_hibernate_class" fields exist, and
 * that no additional attributes were indexed.
 *
 * @param clazz indexed entity whose index is inspected
 * @param comment assertion message for the document-count check
 * @param expectedNumDocs expected number of documents in the index
 * @param expectedNumIndexedAttributes expected attribute count (excluding the 2 implicit fields)
 * @param expectedAttributes names of the attributes that must exist in the index
 */
private void assertDocsInIndex(final Class<?> clazz, final String comment,
final int expectedNumDocs, final int expectedNumIndexedAttributes,
final List<String> expectedAttributes) throws Exception {
Boolean evaluationTookPlace = runTX(new Callable<Boolean>() {
@Override
public Boolean call() throws Exception {
boolean evaluatedIndex = false;
Session session = dm.getSession();
if (session != null) {
FullTextSession fullTextSession = Search
.getFullTextSession(session);
SearchFactory searchFactory = fullTextSession
.getSearchFactory();
IndexReader reader = searchFactory.getIndexReaderAccessor()
.open(clazz);
try {
assertEquals(comment, expectedNumDocs,
reader.numDocs());
if (expectedNumDocs > 0) {
final FieldInfos indexedFieldNames = MultiFields
.getMergedFieldInfos(reader);
for (String expectedAttr : expectedAttributes) {
assertNotNull(
"attribute " + expectedAttr
+ " does not exist in index: "
+ indexedFieldNames,
indexedFieldNames
.fieldInfo(expectedAttr));
}
assertNotNull(
"attribute \"key\" does not exist in index: "
+ indexedFieldNames,
indexedFieldNames.fieldInfo("key"));
assertNotNull(
"attribute \"_hibernate_class\" does not exist in index: "
+ indexedFieldNames,
indexedFieldNames
.fieldInfo("_hibernate_class"));
// The +2 accounts for the implicit "key" and "_hibernate_class" fields.
assertEquals(
"More or less attributes indexed than expected, attributes retrieved from index: "
+ indexedFieldNames,
expectedNumIndexedAttributes + 2,
indexedFieldNames.size());
evaluatedIndex = true;
}
} finally {
// Readers obtained from the accessor must be returned to it.
searchFactory.getIndexReaderAccessor().close(reader);
}
}
return Boolean.valueOf(evaluatedIndex);
}
});
// When documents are expected, the index must actually have been evaluated.
if (expectedNumDocs > 0) {
Assert.assertTrue("Index not found, no evaluation took place",
evaluationTookPlace.booleanValue());
}
}
/** Obtains a FullTextSession wrapping the session bound to the current context. */
protected static FullTextSession getFullTextSession() {
    org.hibernate.Session currentSession = utils.HibernateUtil.getCurrentSession();
    return org.hibernate.search.Search.getFullTextSession(currentSession);
}
/**
 * Seeds this slave node's half of the demo data when the web application starts.
 *
 * Marks the context as "slave", loads the five Device entities already persisted by the
 * master node, then creates and saves six App entities (each with supported devices and
 * a customer review) inside a single transaction.
 *
 * @param event servlet context event providing access to the ServletContext
 */
public void contextInitialized(ServletContextEvent event) {
event.getServletContext().setAttribute("mode", "slave");
// For demonstration purposes, we will have about half the data created by the master node... and the other half created by
// the slave node. After a few seconds, both nodes will refresh their local copies of the index using the overall master... and
// all of the App entities will be searchable from either node.
FullTextSession fullTextSession = Search.getFullTextSession( openSession() );
fullTextSession.beginTransaction();
//
// Get references to the 5 devices, which should have already been populated in the database by the master node
//
Device xPhone = (Device) fullTextSession.createQuery( "from Device as device where device.name = ?" ).setString(0, "xPhone").uniqueResult();
Device xTablet = (Device) fullTextSession.createQuery( "from Device as device where device.name = ?" ).setString(0, "xTablet").uniqueResult();
Device solarSystem = (Device) fullTextSession.createQuery( "from Device as device where device.name = ?" ).setString(0, "Solar System Phone").uniqueResult();
Device flame = (Device) fullTextSession.createQuery( "from Device as device where device.name = ?" ).setString(0, "Flame Book Reader").uniqueResult();
Device pc = (Device) fullTextSession.createQuery( "from Device as device where device.name = ?" ).setString(0, "Personal Computer").uniqueResult();
//
// Create and persist the remaining 6 of 12 apps with devices and customer reviews
//
App frustratedFlamingos = new App(
"Frustrated Flamingos",
"flamingo.jpg",
"A fun little game app, where you throw large birds around for no apparent reason. Why else do you think they're so frustrated?",
"Games",
0.99f);
frustratedFlamingos.setSupportedDevices( new HashSet<Device>(Arrays.asList(new Device[] { xPhone, xTablet, solarSystem, flame, pc })) );
CustomerReview frustratedFlamingosReview = new CustomerReview("BirdSlinger", 4, "LOL, I love catapulting the flamingos into the cows! I hate how the advertisement banner hides part of the view, tho.");
frustratedFlamingos.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { frustratedFlamingosReview })) );
fullTextSession.save(frustratedFlamingos);
logger.info("Persisting " + frustratedFlamingos.getName());
App grype = new App(
"Grype Video Conferencing",
"laptop.jpg",
"Make free local and international calls, with video, using this app and your home Internet connection. Better yet, make free calls using your employer's Internet connection!",
"Internet",
3.99f);
grype.setSupportedDevices( new HashSet<Device>(Arrays.asList(new Device[] { xPhone, xTablet, solarSystem, pc })) );
CustomerReview grypeReview = new CustomerReview("office.casual", 4, "I wish they had not added video to this app in the latest version. I liked it much more back when I didn't have to get dressed.");
grype.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { grypeReview })) );
fullTextSession.save(grype);
logger.info("Persisting " + grype.getName());
App eReader = new App(
"E-Book Reader",
"book.jpg",
"Read books on your computer, or on the go from your mobile device with this powerful e-reader app. We recommend \"Hibernate Search by Example\", from Packt Publishing.",
"Media",
1.99f);
eReader.setSupportedDevices( new HashSet<Device>(Arrays.asList(new Device[] { xPhone, xTablet, solarSystem, flame, pc })) );
CustomerReview eReaderReview = new CustomerReview("StevePerkins", 5, "This 'Hibernate Search by Example' book is brilliant! Thanks for the recommendation!");
eReader.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { eReaderReview })) );
fullTextSession.save(eReader);
logger.info("Persisting " + eReader.getName());
App domeBrowser = new App(
"Dome Web Browser",
"orangeswirls.jpg",
"This amazing app allows us to track all of your online activity. We can figure out where you live, what you had for breakfast this morning, or what your closest secrets are. The app also includes a web browser.",
"Internet",
0);
domeBrowser.setSupportedDevices( new HashSet<Device>(Arrays.asList(new Device[] { solarSystem, flame, pc })) );
CustomerReview domeBrowserReview = new CustomerReview("TinFoilHat", 1, "I uninstalled this app. If the government would fake a moon landing, then they would definately use my browser history to come after me.");
domeBrowser.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { domeBrowserReview })) );
fullTextSession.save(domeBrowser);
logger.info("Persisting " + domeBrowser.getName());
App athenaRadio = new App(
"Athena Internet Radio",
"jamming.jpg",
"Listen to your favorite songs on streaming Internet radio! When you like a song, this app will play more songs similar to that one. Or at least it plays more songs... to be honest, sometimes they're not all that similar. :(",
"Media",
3.99f);
athenaRadio.setSupportedDevices( new HashSet<Device>(Arrays.asList(new Device[] { xPhone, xTablet, solarSystem, flame, pc })) );
CustomerReview athenaRadioReview = new CustomerReview("lskinner", 5, "I requested 'Free Bird', and this app played 'Free Bird'. What's not to like?");
athenaRadio.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { athenaRadioReview })) );
fullTextSession.save(athenaRadio);
logger.info("Persisting " + athenaRadio.getName());
App mapJourney = new App(
"Map Journey",
"compass.jpg",
"Do you need directions to help you reach a destination? This GPS app will definitely produce enough turn-by-turn directions to get you there! Eventually.",
"Travel",
0.99f);
mapJourney.setSupportedDevices( new HashSet<Device>(Arrays.asList(new Device[] { xPhone, solarSystem, pc })) );
CustomerReview mapJourneyReview = new CustomerReview("LostInSpace", 3, "Not great... but still WAY better than Orange maps.");
mapJourney.setCustomerReviews( new HashSet<CustomerReview>(Arrays.asList(new CustomerReview[] { mapJourneyReview })) );
fullTextSession.save(mapJourney);
logger.info("Persisting " + mapJourney.getName());
//
// Close and cleanup the Hibernate session
//
fullTextSession.getTransaction().commit();
fullTextSession.close();
}
/**
 * This method contains the primary search functionality for this servlet, and is automatically invoked once for every HTTP
 * POST to the mapped URL.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    final Logger logger = LoggerFactory.getLogger(SearchServlet.class);
    // The keyword(s) the user typed into the search form.
    final String keywords = request.getParameter("searchString");
    logger.info("Received searchString [" + keywords + "]");
    // Open a plain Hibernate session and wrap it for Hibernate Search.
    final Session session = StartupDataLoader.openSession();
    final FullTextSession searchSession = Search.getFullTextSession(session);
    // Not strictly necessary for a read-only query, but good practice in general.
    searchSession.beginTransaction();
    // Build a keyword query over App's own fields plus the associated Device
    // names and the embedded CustomerReview comments.
    final QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity(App.class).get();
    final org.apache.lucene.search.Query luceneQuery = builder
            .keyword()
            .onFields("name", "description", "supportedDevices.name", "customerReviews.comments")
            .matching(keywords)
            .createQuery();
    final org.hibernate.Query hibernateQuery = searchSession.createFullTextQuery(luceneQuery, App.class);
    final List<App> apps = hibernateQuery.list();
    logger.info("Found " + apps.size() + " apps");
    // Detach the results so the view layer cannot trigger lazy loading of
    // associated devices or embedded customer reviews.
    searchSession.clear();
    request.setAttribute("apps", apps);
    // Commit and release the Hibernate resources.
    searchSession.getTransaction().commit();
    session.close();
    // Render the results through the JSP/JSTL view.
    getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
/**
 * This method contains the primary search functionality for this servlet, and is automatically invoked once for every HTTP
 * POST to the mapped URL.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    final Logger logger = LoggerFactory.getLogger(SearchServlet.class);
    // The keyword(s) the user typed into the search form.
    final String keywords = request.getParameter("searchString");
    logger.info("Received searchString [" + keywords + "]");
    // Open a plain Hibernate session and wrap it for Hibernate Search.
    final Session session = StartupDataLoader.openSession();
    final FullTextSession searchSession = Search.getFullTextSession(session);
    // Not strictly necessary for a read-only query, but good practice in general.
    searchSession.beginTransaction();
    // Build a keyword query against the "name" and "description" fields of App.
    final QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity(App.class).get();
    final org.apache.lucene.search.Query luceneQuery = builder
            .keyword()
            .onFields("name", "description")
            .matching(keywords)
            .createQuery();
    // Execute the search and expose the hits to the view layer.
    final org.hibernate.Query hibernateQuery = searchSession.createFullTextQuery(luceneQuery, App.class);
    final List<App> apps = hibernateQuery.list();
    logger.info("Found " + apps.size() + " search results");
    request.setAttribute("apps", apps);
    // Commit and release the Hibernate resources.
    searchSession.getTransaction().commit();
    session.close();
    // Render the results through the JSP/JSTL view.
    getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
/**
 * This method contains the primary search functionality for this servlet, and is automatically invoked once for every HTTP
 * POST to the mapped URL.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    final Logger logger = LoggerFactory.getLogger(SearchServlet.class);
    // The keyword(s) the user typed into the search form.
    final String keywords = request.getParameter("searchString");
    logger.info("Received searchString [" + keywords + "]");
    // Open a plain Hibernate session and wrap it for Hibernate Search.
    final Session session = StartupDataLoader.openSession();
    final FullTextSession searchSession = Search.getFullTextSession(session);
    // Not strictly necessary for a read-only query, but good practice in general.
    searchSession.beginTransaction();
    // Build a keyword query over App's own fields plus the associated Device
    // names and the embedded CustomerReview comments.
    final QueryBuilder builder = searchSession.getSearchFactory().buildQueryBuilder().forEntity(App.class).get();
    final org.apache.lucene.search.Query luceneQuery = builder
            .keyword()
            .onFields("name", "description", "supportedDevices.name", "customerReviews.comments")
            .matching(keywords)
            .createQuery();
    final org.hibernate.Query hibernateQuery = searchSession.createFullTextQuery(luceneQuery, App.class);
    final List<App> apps = hibernateQuery.list();
    logger.info("Found " + apps.size() + " apps");
    // Detach the results so the view layer cannot trigger lazy loading of
    // associated devices or embedded customer reviews.
    searchSession.clear();
    request.setAttribute("apps", apps);
    // Commit and release the Hibernate resources.
    searchSession.getTransaction().commit();
    session.close();
    // Render the results through the JSP/JSTL view.
    getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
/**
 * Primary search handler for this servlet; invoked once for every HTTP POST to the
 * mapped URL. Matches the user's keywords against the "name" and "description"
 * fields of App only.
 *
 * Fix over the original: the Hibernate session is now closed in a finally block
 * (the original leaked it whenever the query threw), and the transaction is
 * rolled back on failure instead of being left dangling.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // NOTE(review): consider hoisting this to a static final field rather than
    // fetching a logger on every request.
    Logger logger = LoggerFactory.getLogger(SearchServlet.class);

    // Get the user's search keyword(s) from CGI variables
    String searchString = request.getParameter("searchString");
    logger.info("Received searchString [" + searchString + "]");

    // Start a Hibernate session, and wrap it in a Hibernate Search session.
    Session session = StartupDataLoader.openSession();
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    List<App> apps;
    try {
        // Begin a transaction. This may not be strictly necessary, but is a good practice in general.
        fullTextSession.beginTransaction();

        // Build a Lucene keyword query for the "App" index against name and description.
        QueryBuilder queryBuilder =
                fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(App.class).get();
        org.apache.lucene.search.Query luceneQuery = queryBuilder
                .keyword()
                .onFields("name", "description")
                .matching(searchString)
                .createQuery();

        // Wrap the Lucene query in a Hibernate query scoped to App, and execute it.
        org.hibernate.Query hibernateQuery = fullTextSession.createFullTextQuery(luceneQuery, App.class);
        apps = hibernateQuery.list();
        logger.info("Found " + apps.size() + " search results");

        fullTextSession.getTransaction().commit();
    } catch (RuntimeException e) {
        // Roll back so a failed search does not leave an open transaction behind.
        fullTextSession.getTransaction().rollback();
        throw e;
    } finally {
        // Always release the session, even when the query or commit throws.
        session.close();
    }

    // Put the search results on the HTTP request object, and forward the request
    // (including the results) to the JSP/JSTL view for rendering.
    request.setAttribute("apps", apps);
    getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}
/**
 * Primary search handler for this servlet; invoked once for every HTTP POST to the
 * mapped URL. Matches the user's keywords against the "name" and "description"
 * fields of App, the "name" field of associated Device entities, and the
 * "comments" field of embedded CustomerReview objects.
 *
 * Fix over the original: the Hibernate session is now closed in a finally block
 * (the original leaked it whenever the query threw), and the transaction is
 * rolled back on failure instead of being left dangling.
 */
@SuppressWarnings("unchecked")
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    // NOTE(review): consider hoisting this to a static final field rather than
    // fetching a logger on every request.
    Logger logger = LoggerFactory.getLogger(SearchServlet.class);

    // Get the user's search keyword(s) from CGI variables
    String searchString = request.getParameter("searchString");
    logger.info("Received searchString [" + searchString + "]");

    // Start a Hibernate session, and wrap it in a Hibernate Search session.
    Session session = StartupDataLoader.openSession();
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    List<App> apps;
    try {
        // Begin a transaction. This may not be strictly necessary, but is a good practice in general.
        fullTextSession.beginTransaction();

        // Build a Lucene keyword query for the "App" index against the four relevant fields.
        QueryBuilder queryBuilder =
                fullTextSession.getSearchFactory().buildQueryBuilder().forEntity(App.class).get();
        org.apache.lucene.search.Query luceneQuery = queryBuilder
                .keyword()
                .onFields("name", "description", "supportedDevices.name", "customerReviews.comments")
                .matching(searchString)
                .createQuery();

        // Wrap the Lucene query in a Hibernate query scoped to App, and execute it.
        org.hibernate.Query hibernateQuery = fullTextSession.createFullTextQuery(luceneQuery, App.class);
        apps = hibernateQuery.list();
        logger.info("Found " + apps.size() + " apps");

        // Detach the results from the Hibernate session (to prevent unwanted interaction
        // between the view layer and Hibernate when associated devices or embedded
        // customer reviews are referenced).
        fullTextSession.clear();

        fullTextSession.getTransaction().commit();
    } catch (RuntimeException e) {
        // Roll back so a failed search does not leave an open transaction behind.
        fullTextSession.getTransaction().rollback();
        throw e;
    } finally {
        // Always release the session, even when the query or commit throws.
        session.close();
    }

    // Put the search results on the HTTP request object, and forward the request
    // (including the results) to the JSP/JSTL view for rendering.
    request.setAttribute("apps", apps);
    getServletContext().getRequestDispatcher("/WEB-INF/pages/search.jsp").forward(request, response);
}