The following lists example usages of org.apache.lucene.document.IntPoint (collected from org.apache.lucene.search.highlight.SimpleFragmenter and related code). Follow the link to view the source on GitHub, or leave a comment in the panel on the right.
/**
 * Indexes {@code value} under the given key as an {@link IntPoint}, plus an
 * optional stored field and, for sortable keys, a numeric doc-values field.
 * A null value is a no-op (the type precondition still runs first).
 */
void addToDoc(IndexKey key, Integer value){
  // Type check deliberately runs even when value is null.
  Preconditions.checkArgument(key.getValueType() == Integer.class);
  if (value == null) {
    return;
  }
  checkIfMultiValueField(key);
  final String fieldName = key.getIndexFieldName();
  doc.add(new IntPoint(fieldName, value));
  if (key.isStored()) {
    doc.add(new StoredField(fieldName, value));
  }
  if (key.isSorted()) {
    Preconditions.checkArgument(key.getSortedValueType() == SearchFieldSorting.FieldType.INTEGER);
    doc.add(new NumericDocValuesField(fieldName, value));
  }
}
public void testDimensionalRangeQuery() throws Exception {
  // Point range queries don't currently highlight; just verify that
  // highlighting over the hits does not throw.
  query = IntPoint.newRangeQuery(NUMERIC_FIELD_NAME, 2, 6);
  searcher = newSearcher(reader);
  hits = searcher.search(query, 100);
  final int maxNumFragmentsRequired = 2;
  QueryScorer fragmentScorer = new QueryScorer(query, FIELD_NAME);
  Highlighter highlighter = new Highlighter(this, fragmentScorer);
  for (int hit = 0; hit < hits.totalHits.value; hit++) {
    String text = searcher.doc(hits.scoreDocs[hit].doc)
        .getField(NUMERIC_FIELD_NAME).numericValue().toString();
    TokenStream tokenStream = analyzer.tokenStream(FIELD_NAME, text);
    // Fresh fragmenter per hit, as SimpleFragmenter carries position state.
    highlighter.setTextFragmenter(new SimpleFragmenter(40));
    // Result intentionally discarded; success == no exception.
    highlighter.getBestFragments(tokenStream, text, maxNumFragmentsRequired, "...");
  }
}
/**
 * Sets up reusable field/document instances when {@code reuseFields} is on;
 * otherwise leaves them null so fresh instances are created per document.
 */
public DocState(boolean reuseFields, FieldType ft, FieldType bodyFt) {
  this.reuseFields = reuseFields;
  if (!reuseFields) {
    numericFields = null;
    fields = null;
    doc = null;
    return;
  }
  fields = new HashMap<>();
  numericFields = new HashMap<>();
  // Pre-populate the maps with the default fields so they can be reused
  // across documents instead of reallocated.
  fields.put(BODY_FIELD, new Field(BODY_FIELD, "", bodyFt));
  fields.put(TITLE_FIELD, new Field(TITLE_FIELD, "", ft));
  fields.put(DATE_FIELD, new Field(DATE_FIELD, "", ft));
  fields.put(ID_FIELD, new StringField(ID_FIELD, "", Field.Store.YES));
  fields.put(NAME_FIELD, new Field(NAME_FIELD, "", ft));
  numericFields.put(DATE_MSEC_FIELD, new LongPoint(DATE_MSEC_FIELD, 0L));
  numericFields.put(TIME_SEC_FIELD, new IntPoint(TIME_SEC_FIELD, 0));
  doc = new Document();
}
public void testMultiValuedPointsSortedCorrectly() throws Exception {
  // Add multi-valued points in DESCENDING order for each numeric type;
  // MemoryIndex must still sort them so set queries find the middle value.
  Document doc = new Document();
  for (int v = 3; v >= 1; v--) {
    doc.add(new IntPoint("ints", v));
  }
  for (long v = 3L; v >= 1L; v--) {
    doc.add(new LongPoint("longs", v));
  }
  for (float v = 3F; v >= 1F; v--) {
    doc.add(new FloatPoint("floats", v));
  }
  for (double v = 3D; v >= 1D; v--) {
    doc.add(new DoublePoint("doubles", v));
  }
  MemoryIndex index = MemoryIndex.fromDocument(doc, analyzer);
  IndexSearcher searcher = index.createSearcher();
  assertEquals(1, searcher.count(IntPoint.newSetQuery("ints", 2)));
  assertEquals(1, searcher.count(LongPoint.newSetQuery("longs", 2)));
  assertEquals(1, searcher.count(FloatPoint.newSetQuery("floats", 2)));
  assertEquals(1, searcher.count(DoublePoint.newSetQuery("doubles", 2)));
}
CoreParserTestIndexData(Analyzer analyzer) throws Exception {
BufferedReader d = new BufferedReader(new InputStreamReader(
TestCoreParser.class.getResourceAsStream("reuters21578.txt"), StandardCharsets.US_ASCII));
dir = LuceneTestCase.newDirectory();
IndexWriter writer = new IndexWriter(dir, LuceneTestCase.newIndexWriterConfig(analyzer));
String line = d.readLine();
while (line != null) {
int endOfDate = line.indexOf('\t');
String date = line.substring(0, endOfDate).trim();
String content = line.substring(endOfDate).trim();
Document doc = new Document();
doc.add(LuceneTestCase.newTextField("date", date, Field.Store.YES));
doc.add(LuceneTestCase.newTextField("contents", content, Field.Store.YES));
doc.add(new IntPoint("date3", Integer.parseInt(date)));
writer.addDocument(doc);
line = d.readLine();
}
d.close();
writer.close();
reader = DirectoryReader.open(dir);
searcher = LuceneTestCase.newSearcher(reader, false);
}
/**
 * Builds an integer range query for the given condition. Open ends fall back
 * to Integer.MIN_VALUE / Integer.MAX_VALUE; exclusive ends are narrowed by
 * one ({@code Math.addExact} fails fast if that would overflow).
 */
private Query createIntRangeQuery(final String name, final Object value,
    final ConditionType type, final boolean minInclusive, final boolean maxInclusive) {
  final Integer intValue = Integer.valueOf(value.toString());
  Integer lower = getMin(type, intValue);
  lower = (lower == null)
      ? Integer.MIN_VALUE
      : (minInclusive ? lower : Math.addExact(lower, 1));
  Integer upper = getMax(type, intValue);
  upper = (upper == null)
      ? Integer.MAX_VALUE
      : (maxInclusive ? upper : Math.addExact(upper, -1));
  return IntPoint.newRangeQuery(name, lower, upper);
}
@Override
public void setUp() throws Exception {
  super.setUp();
  directory = newDirectory();
  IndexWriterConfig conf =
      newIndexWriterConfig(new MockAnalyzer(random())).setMergePolicy(newLogMergePolicy());
  RandomIndexWriter writer = new RandomIndexWriter(random(), directory, conf);
  // One document per entry in docFields, each carrying the same content
  // under several postings configurations plus a constant point/DV pair.
  int id = 0;
  for (String content : docFields) {
    Document doc = new Document();
    doc.add(newField(FIELD_WITH_OFFSETS, content, OFFSETS));
    doc.add(newField(FIELD_NO_OFFSETS, content, TextField.TYPE_STORED));
    doc.add(newField(FIELD_DOCS_ONLY, content, DOCS));
    doc.add(newField(FIELD_FREQS, content, DOCS_AND_FREQS));
    doc.add(new IntPoint(FIELD_POINT, 10));
    doc.add(new NumericDocValuesField(FIELD_POINT, 10));
    doc.add(new NumericDocValuesField("id", id));
    doc.add(newField("id", Integer.toString(id), TextField.TYPE_STORED));
    writer.addDocument(doc);
    id++;
  }
  // Collapse to a single segment so the test sees one leaf reader.
  writer.forceMerge(1);
  reader = writer.getReader();
  writer.close();
  searcher = newSearcher(getOnlyLeafReader(reader));
}
public void testToString() throws Exception {
  // int ranges
  assertEquals("field:[1 TO 2]", IntPoint.newRangeQuery("field", 1, 2).toString());
  assertEquals("field:[-2 TO 1]", IntPoint.newRangeQuery("field", -2, 1).toString());

  // long ranges, including values well beyond int range
  assertEquals("field:[1099511627776 TO 2199023255552]",
      LongPoint.newRangeQuery("field", 1L << 40, 1L << 41).toString());
  assertEquals("field:[-5 TO 6]", LongPoint.newRangeQuery("field", -5L, 6L).toString());

  // float ranges
  assertEquals("field:[1.3 TO 2.5]", FloatPoint.newRangeQuery("field", 1.3F, 2.5F).toString());
  assertEquals("field:[-2.9 TO 1.0]", FloatPoint.newRangeQuery("field", -2.9F, 1.0F).toString());

  // double ranges
  assertEquals("field:[1.3 TO 2.5]", DoublePoint.newRangeQuery("field", 1.3, 2.5).toString());
  assertEquals("field:[-2.9 TO 1.0]", DoublePoint.newRangeQuery("field", -2.9, 1.0).toString());

  // n-dimensional double range: one [lo TO hi] segment per dimension
  assertEquals("field:[1.3 TO 2.5],[-2.9 TO 1.0]",
      DoublePoint.newRangeQuery("field",
          new double[] { 1.3, -2.9 },
          new double[] { 2.5, 1.0 }).toString());
}
public void testBasicMultiDimPointInSetQuery() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  IndexWriter writer = new IndexWriter(dir, iwc);
  // Single document with one two-dimensional int point: (17, 42).
  Document doc = new Document();
  doc.add(new IntPoint("int", 17, 42));
  writer.addDocument(doc);
  IndexReader reader = DirectoryReader.open(writer);
  IndexSearcher searcher = newSearcher(reader, false);
  // Wrong second dimension: no match.
  assertEquals(0, searcher.count(newMultiDimIntSetQuery("int", 2, 17, 41)));
  // Exact tuple: matches.
  assertEquals(1, searcher.count(newMultiDimIntSetQuery("int", 2, 17, 42)));
  // Extra non-matching tuples in the set don't disturb the match.
  assertEquals(1, searcher.count(newMultiDimIntSetQuery("int", 2, -7, -7, 17, 42)));
  assertEquals(1, searcher.count(newMultiDimIntSetQuery("int", 2, 17, 42, -14, -14)));
  writer.close();
  reader.close();
  dir.close();
}
/**
 * Builds an int point set query from the given values, skipping any value
 * with a fractional part (such a value can never equal an integer point).
 * Returns a match-no-docs query when nothing survives filtering.
 */
@Override
public Query termsQuery(String field, List<Object> values) {
  final int[] parsed = new int[values.size()];
  int count = 0;
  for (Object value : values) {
    if (!hasDecimalPart(value)) {
      parsed[count++] = parse(value, true);
    }
  }
  if (count == 0) {
    return Queries.newMatchNoDocsQuery("All values have a decimal part");
  }
  final int[] terms = (count == parsed.length) ? parsed : Arrays.copyOf(parsed, count);
  return IntPoint.newSetQuery(field, terms);
}
public void testEmptyPointInSetQuery() throws Exception {
  Directory dir = newDirectory();
  IndexWriterConfig iwc = newIndexWriterConfig();
  iwc.setCodec(getCodec());
  IndexWriter writer = new IndexWriter(dir, iwc);
  // Index one document carrying a point of every supported type.
  Document doc = new Document();
  doc.add(new IntPoint("int", 17));
  doc.add(new LongPoint("long", 17L));
  doc.add(new FloatPoint("float", 17.0f));
  doc.add(new DoublePoint("double", 17.0));
  doc.add(new BinaryPoint("bytes", new byte[] {0, 17}));
  writer.addDocument(doc);
  IndexReader reader = DirectoryReader.open(writer);
  IndexSearcher searcher = newSearcher(reader, false);
  // A set query with no values must match nothing, for every point type.
  assertEquals(0, searcher.count(IntPoint.newSetQuery("int")));
  assertEquals(0, searcher.count(LongPoint.newSetQuery("long")));
  assertEquals(0, searcher.count(FloatPoint.newSetQuery("float")));
  assertEquals(0, searcher.count(DoublePoint.newSetQuery("double")));
  assertEquals(0, searcher.count(BinaryPoint.newSetQuery("bytes")));
  writer.close();
  reader.close();
  dir.close();
}
public void testPointInSetQueryToString() throws Exception {
  // Numeric set queries render their values in ascending order inside braces.
  assertEquals("int:{-42 18}", IntPoint.newSetQuery("int", -42, 18).toString());
  assertEquals("long:{-42 18}", LongPoint.newSetQuery("long", -42L, 18L).toString());
  assertEquals("float:{-42.0 18.0}", FloatPoint.newSetQuery("float", -42.0f, 18.0f).toString());
  assertEquals("double:{-42.0 18.0}", DoublePoint.newSetQuery("double", -42.0, 18.0).toString());
  // Binary values render as hex and are sorted by byte order ([12]=18, [2a]=42),
  // even though the arguments were supplied as 42, 18.
  assertEquals("bytes:{[12] [2a]}",
      BinaryPoint.newSetQuery("bytes", new byte[] {42}, new byte[] {18}).toString());
}
/**
 * Creates a temp {@code MMapDirectory} holding a single document with a text
 * field plus an IntPoint/StoredField pair ("intfield"), then opens a reader
 * and searcher over it.
 *
 * Fix: the IndexWriter is now closed via try-with-resources so the directory
 * is not left locked if {@code addDocument} throws (the original leaked the
 * writer on any failure).
 */
@Before
public void setUp() throws Exception {
  analyzer = new StandardAnalyzer();
  tempDirectory = Files.createTempDirectory("lucene");
  directory = new MMapDirectory(tempDirectory);
  IndexWriterConfig config = new IndexWriterConfig(analyzer);
  try (IndexWriter iwriter = new IndexWriter(directory, config)) {
    Document doc = new Document();
    doc.add(new Field("contents", "name=text", TextField.TYPE_STORED));
    doc.add(new IntPoint("intfield", 4));    // indexed for point queries
    doc.add(new StoredField("intfield", 4)); // separately stored for retrieval
    iwriter.addDocument(doc);
  }
  ireader = DirectoryReader.open(directory);
  isearcher = new IndexSearcher(ireader);
}
public void testIllegalTooManyDimensions() throws Exception {
  Directory dir = newDirectory();
  IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(new MockAnalyzer(random())));
  // One dimension more than the hard limit must be rejected at field
  // construction time.
  final byte[][] values = new byte[PointValues.MAX_INDEX_DIMENSIONS + 1][];
  Arrays.setAll(values, i -> new byte[4]);
  Document doc = new Document();
  expectThrows(IllegalArgumentException.class, () -> {
    doc.add(new BinaryPoint("dim", values));
  });
  // The writer must remain usable after the failed add.
  Document doc2 = new Document();
  doc2.add(new IntPoint("dim", 17));
  w.addDocument(doc2);
  w.close();
  dir.close();
}
/**
 * Verifies merged PointValues statistics when one segment has no points:
 * min/max packed values come from the only segment that has them, and a
 * never-indexed field reports null/0 stats.
 *
 * Fix: the reader, writer and directory are now closed (the original
 * leaked all three).
 */
public void testMergedStatsOneSegmentWithoutPoints() throws IOException {
  Directory dir = new ByteBuffersDirectory();
  IndexWriter w = new IndexWriter(dir, new IndexWriterConfig(null).setMergePolicy(NoMergePolicy.INSTANCE));
  // First segment: a document with no points at all.
  w.addDocument(new Document());
  DirectoryReader.open(w).close();
  // Second segment: one document with an int point of MIN_VALUE.
  Document doc = new Document();
  doc.add(new IntPoint("field", Integer.MIN_VALUE));
  w.addDocument(doc);
  IndexReader reader = DirectoryReader.open(w);
  try {
    // Integer.MIN_VALUE encodes to all-zero bytes, so both min and max are
    // the 4-byte zero array.
    assertArrayEquals(new byte[4], PointValues.getMinPackedValue(reader, "field"));
    assertArrayEquals(new byte[4], PointValues.getMaxPackedValue(reader, "field"));
    assertEquals(1, PointValues.getDocCount(reader, "field"));
    assertEquals(1, PointValues.size(reader, "field"));
    // A field never indexed with points reports null packed values and 0 counts.
    assertNull(PointValues.getMinPackedValue(reader, "field2"));
    assertNull(PointValues.getMaxPackedValue(reader, "field2"));
    assertEquals(0, PointValues.getDocCount(reader, "field2"));
    assertEquals(0, PointValues.size(reader, "field2"));
  } finally {
    reader.close();
    w.close();
    dir.close();
  }
}
/**
 * Builds an int range query on field "@"+k. Null or empty ends are open
 * (Integer.MIN_VALUE / Integer.MAX_VALUE); exclusive ends ({@code !l} /
 * {@code !g}) are narrowed by one.
 *
 * Fix: narrowing an exclusive bound sitting at Integer.MAX_VALUE (or
 * MIN_VALUE) used to overflow and wrap, silently turning an empty range
 * into one that matches every document. Such a range is empty, so we now
 * return an empty set query, which matches no documents.
 */
@Override
public Query whr(String k, Object n, Object x, boolean l, boolean g) {
  if (n == null && x == null) {
    throw new NullPointerException("Range for "+k+" must be number, but null");
  }
  int lower;
  if (n == null || "".equals(n)) {
    lower = Integer.MIN_VALUE;
  } else {
    lower = Synt.asInt(n);
    if (!l) {
      if (lower == Integer.MAX_VALUE) {
        return IntPoint.newSetQuery("@" + k); // empty set: matches nothing
      }
      lower = lower + 1;
    }
  }
  int upper;
  if (x == null || "".equals(x)) {
    upper = Integer.MAX_VALUE;
  } else {
    upper = Synt.asInt(x);
    if (!g) {
      if (upper == Integer.MIN_VALUE) {
        return IntPoint.newSetQuery("@" + k); // empty set: matches nothing
      }
      upper = upper - 1;
    }
  }
  return IntPoint.newRangeQuery("@" + k, lower, upper);
}
// Accumulates each distinct int value (dimension 0 of the packed point)
// into values[] with its occurrence count in count[]; `last`/`pos` track
// the current slot, and traversal is aborted via breakException once
// values[] is full.
@Override
public void visit(int docID, byte[] packedValue) throws IOException {
// TODO: handle filter or deleted documents?
int v = IntPoint.decodeDimension(packedValue, 0);
// NOTE(review): values below `last` are skipped, which assumes the
// traversal visits points in non-decreasing order — confirm at the caller.
if (v < last) return;
if (v == last && pos >= 0) {
// Same value as the current slot: just bump its count.
count[pos]++;
} else {
if (pos+1 < values.length) {
// Open a new slot for this distinct value.
last = v;
++pos;
values[pos] = v;
count[pos] = 1;
} else {
// a new value we don't have room for
throw breakException;
}
}
}
/**
 * Builds an int point range query from optional user-supplied bounds.
 * Missing bounds are open (MIN_VALUE/MAX_VALUE); exclusive bounds are
 * narrowed by one, and a bound that cannot be narrowed (already at the
 * extreme) yields a match-no-docs query.
 */
@Override
public Query getPointRangeQuery(QParser parser, SchemaField field, String min, String max, boolean minInclusive,
    boolean maxInclusive) {
  final String fieldName = field.getName();
  int lower = Integer.MIN_VALUE;
  if (min != null) {
    lower = parseIntFromUser(fieldName, min);
    if (!minInclusive) {
      // Exclusive lower bound at MAX_VALUE leaves nothing to match.
      if (lower == Integer.MAX_VALUE) return new MatchNoDocsQuery();
      lower++;
    }
  }
  int upper = Integer.MAX_VALUE;
  if (max != null) {
    upper = parseIntFromUser(fieldName, max);
    if (!maxInclusive) {
      // Exclusive upper bound at MIN_VALUE leaves nothing to match.
      if (upper == Integer.MIN_VALUE) return new MatchNoDocsQuery();
      upper--;
    }
  }
  return IntPoint.newRangeQuery(fieldName, lower, upper);
}
/**
 * Converts integral/floating data into the matching Lucene point field:
 * byte/short/int -> IntPoint, long -> LongPoint, float -> FloatPoint,
 * double -> DoublePoint. Any other type is a StorageException.
 */
@Override
public Iterable<IndexableField> convert(LuceneContext context, String path, Field field, LuceneIndex annotation, Type type, Object data) {
  Class<?> clazz = ClassUtility.primitiveToWrapper(TypeUtility.getRawType(type, null));
  final IndexableField point;
  if (Byte.class.isAssignableFrom(clazz)) {
    point = new IntPoint(path, (byte) data);
  } else if (Short.class.isAssignableFrom(clazz)) {
    point = new IntPoint(path, (short) data);
  } else if (Integer.class.isAssignableFrom(clazz)) {
    point = new IntPoint(path, (int) data);
  } else if (Long.class.isAssignableFrom(clazz)) {
    point = new LongPoint(path, (long) data);
  } else if (Float.class.isAssignableFrom(clazz)) {
    point = new FloatPoint(path, (float) data);
  } else if (Double.class.isAssignableFrom(clazz)) {
    point = new DoublePoint(path, (double) data);
  } else {
    throw new StorageException();
  }
  Collection<IndexableField> indexables = new LinkedList<>();
  indexables.add(point);
  return indexables;
}
/**
 * Converts Boolean/AtomicBoolean data into an IntPoint (true -> 1,
 * false -> 0). Any other type is a StorageException.
 */
@Override
public Iterable<IndexableField> convert(LuceneContext context, String path, Field field, LuceneIndex annotation, Type type, Object data) {
  Class<?> clazz = ClassUtility.primitiveToWrapper(TypeUtility.getRawType(type, null));
  final int bit;
  if (AtomicBoolean.class.isAssignableFrom(clazz)) {
    bit = AtomicBoolean.class.cast(data).get() ? 1 : 0;
  } else if (Boolean.class.isAssignableFrom(clazz)) {
    bit = Boolean.class.cast(data) ? 1 : 0;
  } else {
    throw new StorageException();
  }
  Collection<IndexableField> indexables = new LinkedList<>();
  indexables.add(new IntPoint(path, bit));
  return indexables;
}
@Test
public void testPointExactQuery() throws Exception {
  // Exact-match point query: exactly one document has id == 1.
  Query query = IntPoint.newExactQuery("id", 1);
  TopDocs results = searcher.search(query, 1000);
  Assert.assertEquals(1, results.totalHits.value);
}
@Test
public void testPointRangeQuery() throws Exception {
  // Range query: ids 501..1000 inclusive should hit exactly 500 documents.
  Query query = IntPoint.newRangeQuery("id", 501, 1000);
  TopDocs results = searcher.search(query, 1000);
  Assert.assertEquals(500, results.totalHits.value);
}
@Test
public void testPointSetQuery() throws Exception {
  // Set query: each of the four listed ids matches one document.
  Query query = IntPoint.newSetQuery("id", 1, 10, 100, 1000);
  TopDocs results = searcher.search(query, 1000);
  Assert.assertEquals(4, results.totalHits.value);
}
@Test
public void testSuggestOnMostlyDeletedDocuments() throws Exception {
  Analyzer analyzer = new MockAnalyzer(random());
  // using IndexWriter instead of RandomIndexWriter
  IndexWriter iw = new IndexWriter(dir, iwcWithSuggestField(analyzer, "suggest_field"));
  final int numDocs = Math.min(1000, atLeast(10));
  for (int weight = 1; weight <= numDocs; weight++) {
    Document document = new Document();
    document.add(new SuggestField("suggest_field", "abc_" + weight, weight));
    document.add(new StoredField("weight_fld", weight));
    document.add(new IntPoint("weight_fld", weight));
    iw.addDocument(document);
    if (usually()) {
      iw.commit();
    }
  }
  // Delete every document except the one with weight 1.
  iw.deleteDocuments(IntPoint.newRangeQuery("weight_fld", 2, Integer.MAX_VALUE));
  DirectoryReader reader = DirectoryReader.open(iw);
  SuggestIndexSearcher suggestSearcher = new SuggestIndexSearcher(reader);
  PrefixCompletionQuery prefixQuery =
      new PrefixCompletionQuery(analyzer, new Term("suggest_field", "abc_"));
  // Only the surviving document may be suggested.
  TopSuggestDocs suggest = suggestSearcher.suggest(prefixQuery, 1, false);
  assertSuggestions(suggest, new Entry("abc_1", 1));
  reader.close();
  iw.close();
}
/**
 * Builds a "job" document: a stored "skill" keyword plus a "year" indexed
 * both as an IntPoint (for queries) and a StoredField (for retrieval).
 */
private Document makeJob(String skill, int year) {
  Document job = new Document();
  job.add(newStringField("skill", skill, Field.Store.YES));
  job.add(new IntPoint("year", year));
  job.add(new StoredField("year", year));
  return job;
}
// Adds join "link" fields for one document: the raw link value as text plus
// int/float/long/double point fields and matching doc-values fields
// (sorted-set / sorted-numeric when multi-valued, single-valued otherwise).
private void addLinkFields(final Random random, Document document, final String fieldName, String linkValue,
boolean multipleValuesPerDocument, boolean globalOrdinalJoin) {
document.add(newTextField(random, fieldName, linkValue, Field.Store.NO));
// The link value is parsed as unsigned hex into an int.
final int linkInt = Integer.parseUnsignedInt(linkValue,16);
document.add(new IntPoint(fieldName + "INT", linkInt));
document.add(new FloatPoint(fieldName + "FLOAT", linkInt));
// NOTE(review): linkInt is an int, so `linkInt<<32` shifts by 32 & 31 == 0
// and linkLong ends up equal to (long) linkInt, NOT the high/low-packed
// value the expression suggests. Any fix must also change whatever
// query-side code derives the same value — confirm before editing.
final long linkLong = linkInt<<32 | linkInt;
document.add(new LongPoint(fieldName + "LONG", linkLong));
document.add(new DoublePoint(fieldName + "DOUBLE", linkLong));
if (multipleValuesPerDocument) {
// Multi-valued doc values: sorted-set for text, sorted-numeric for numbers.
document.add(new SortedSetDocValuesField(fieldName, new BytesRef(linkValue)));
document.add(new SortedNumericDocValuesField(fieldName+ "INT", linkInt));
document.add(new SortedNumericDocValuesField(fieldName+ "FLOAT", Float.floatToRawIntBits(linkInt)));
document.add(new SortedNumericDocValuesField(fieldName+ "LONG", linkLong));
document.add(new SortedNumericDocValuesField(fieldName+ "DOUBLE", Double.doubleToRawLongBits(linkLong)));
} else {
// Single-valued doc-values counterparts of the same fields.
document.add(new SortedDocValuesField(fieldName, new BytesRef(linkValue)));
document.add(new NumericDocValuesField(fieldName+ "INT", linkInt));
document.add(new FloatDocValuesField(fieldName+ "FLOAT", linkInt));
document.add(new NumericDocValuesField(fieldName+ "LONG", linkLong));
document.add(new DoubleDocValuesField(fieldName+ "DOUBLE", linkLong));
}
if (globalOrdinalJoin) {
// Global ordinal join uses a shared sorted doc-values field.
document.add(new SortedDocValuesField("join_field", new BytesRef(linkValue)));
}
}
public void testIntRandomMultiRangeQuery() throws IOException {
  final int numDims = TestUtil.nextInt(random(), 1, 3);
  final int numRanges = TestUtil.nextInt(random(), 3, 8);
  Directory dir = newDirectory();
  RandomIndexWriter w = new RandomIndexWriter(random(), dir);
  // One document holding a single random numDims-dimensional point.
  int[] value = new int[numDims];
  for (int dim = 0; dim < numDims; ++dim) {
    value[dim] = TestUtil.nextInt(random(), 1, 10);
  }
  Document doc = new Document();
  doc.add(new IntPoint("point", value));
  w.addDocument(doc);
  IndexReader reader = w.getReader();
  IndexSearcher searcher = new IndexSearcher(reader);
  searcher.setQueryCache(null);
  IntPointMultiRangeBuilder builder = new IntPointMultiRangeBuilder("point", numDims);
  for (int range = 0; range < numRanges; range++) {
    int[] lower = new int[numDims];
    int[] upper = new int[numDims];
    for (int dim = 0; dim < numDims; ++dim) {
      // NOTE(review): nextInt(1) always returns 0, so every range collapses
      // to the indexed value itself — possibly intended to be wider jitter.
      lower[dim] = value[dim] - random().nextInt(1);
      upper[dim] = value[dim] + random().nextInt(1);
    }
    builder.add(lower, upper);
  }
  // Success criterion is simply that the search completes without error.
  searcher.search(builder.build(), Integer.MAX_VALUE);
  reader.close();
  w.close();
  dir.close();
}
@Override
protected void addRandomFields(Document doc) {
  // Add 0-2 random int points under field "f".
  int remaining = random().nextInt(3);
  while (remaining-- > 0) {
    doc.add(new IntPoint("f", random().nextInt()));
  }
}
/**
 * Reusable document state: one Document pre-wired with title/body/id/date
 * fields, a "docid_int" point, and sorted/numeric doc-values fields.
 */
public DocState() {
  doc = new Document();

  title = new StringField("title", "", Field.Store.NO);
  doc.add(title);

  // Tokenized fields are stored and carry term vectors with positions
  // and offsets.
  FieldType vectorType = new FieldType(TextField.TYPE_STORED);
  vectorType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS);
  vectorType.setStoreTermVectors(true);
  vectorType.setStoreTermVectorOffsets(true);
  vectorType.setStoreTermVectorPositions(true);
  titleTokenized = new Field("titleTokenized", "", vectorType);
  doc.add(titleTokenized);
  body = new Field("body", "", vectorType);
  doc.add(body);

  id = new StringField("docid", "", Field.Store.YES);
  doc.add(id);
  idNum = new IntPoint("docid_int", 0);
  doc.add(idNum);
  date = new StringField("date", "", Field.Store.YES);
  doc.add(date);

  titleDV = new SortedDocValuesField("titleDV", new BytesRef());
  idNumDV = new NumericDocValuesField("docid_intDV", 0);
  doc.add(titleDV);
  doc.add(idNumDV);
}
/**
 * Parses a point-range-query XML element into a numeric range query. The
 * "type" attribute selects int/long/double/float (default "int"); a missing
 * lowerTerm/upperTerm falls back to the type's full range.
 *
 * @throws ParserException on an unknown type or an unparseable bound
 */
@Override
public Query getQuery(Element e) throws ParserException {
  String field = DOMUtils.getAttributeWithInheritanceOrFail(e, "fieldName");
  final String lowerTerm = DOMUtils.getAttribute(e, "lowerTerm", null);
  final String upperTerm = DOMUtils.getAttribute(e, "upperTerm", null);
  String type = DOMUtils.getAttribute(e, "type", "int");
  try {
    if (type.equalsIgnoreCase("int")) {
      int lo = lowerTerm == null ? Integer.MIN_VALUE : Integer.parseInt(lowerTerm);
      int hi = upperTerm == null ? Integer.MAX_VALUE : Integer.parseInt(upperTerm);
      return IntPoint.newRangeQuery(field, lo, hi);
    }
    if (type.equalsIgnoreCase("long")) {
      long lo = lowerTerm == null ? Long.MIN_VALUE : Long.parseLong(lowerTerm);
      long hi = upperTerm == null ? Long.MAX_VALUE : Long.parseLong(upperTerm);
      return LongPoint.newRangeQuery(field, lo, hi);
    }
    if (type.equalsIgnoreCase("double")) {
      double lo = lowerTerm == null ? Double.NEGATIVE_INFINITY : Double.parseDouble(lowerTerm);
      double hi = upperTerm == null ? Double.POSITIVE_INFINITY : Double.parseDouble(upperTerm);
      return DoublePoint.newRangeQuery(field, lo, hi);
    }
    if (type.equalsIgnoreCase("float")) {
      float lo = lowerTerm == null ? Float.NEGATIVE_INFINITY : Float.parseFloat(lowerTerm);
      float hi = upperTerm == null ? Float.POSITIVE_INFINITY : Float.parseFloat(upperTerm);
      return FloatPoint.newRangeQuery(field, lo, hi);
    }
    throw new ParserException("type attribute must be one of: [long, int, double, float]");
  } catch (NumberFormatException nfe) {
    throw new ParserException("Could not parse lowerTerm or upperTerm into a number", nfe);
  }
}