下面列出了 org.antlr.runtime.ANTLRStringStream API 类的实例代码及用法示例,也可以点击链接到 GitHub 查看源代码。
/**
 * Parses an expression from text into a constraint descriptor.
 * Returns {@code null} when the text does not parse cleanly.
 */
public ConstraintConnectiveDescr parse( final String text ) {
    ConstraintConnectiveDescr result = null;
    try {
        DRLLexer drlLexer = DRLFactory.getDRLLexer(new ANTLRStringStream(text), languageLevel);
        CommonTokenStream tokenStream = new CommonTokenStream( drlLexer );
        RecognizerSharedState sharedState = new RecognizerSharedState();
        helper = new ParserHelper( tokenStream, sharedState, languageLevel );
        DRLExpressions expressions = DRLFactory.getDRLExpressions(tokenStream, sharedState, helper, languageLevel);
        expressions.setBuildDescr( true );
        expressions.setLeftMostExpr( null ); // reset to a known initial value
        BaseDescr descr = expressions.conditionalOrExpression();
        // Only build the result when something was parsed and no errors occurred.
        if ( descr != null && !expressions.hasErrors() ) {
            result = ConstraintConnectiveDescr.newAnd();
            result.addOrMerge( descr );
        }
    } catch ( RecognitionException e ) {
        helper.reportError( e );
    }
    return result;
}
public void test2ReplaceMiddleIndex1InsertBefore() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    rewriter.insertBefore(0, "_");
    rewriter.replace(1, "x");
    rewriter.replace(1, "y"); // a second replace at the same index wins
    assertEquals("_ayc", rewriter.toString());
}
@Test
public void selectGeoWithAnd() throws RecognitionException {
    String queryString = "select * where location within 20000 of 37,-75 "
        + "and created > 1407776999925 and created < 1407777000266";
    // Build the filter-parser pipeline over the query text.
    CpQueryFilterLexer filterLexer =
        new CpQueryFilterLexer( new ANTLRStringStream( queryString ) );
    CpQueryFilterParser filterParser =
        new CpQueryFilterParser( new TokenRewriteStream( filterLexer ) );
    ParsedQuery parsed = filterParser.ql().parsedQuery;
    // Expected tree shape: and( and( within, ... ), ... ) with within leftmost.
    AndOperand outerAnd = ( AndOperand ) parsed.getRootOperand();
    AndOperand innerAnd = ( AndOperand ) outerAnd.getLeft();
    WithinOperand within = ( WithinOperand ) innerAnd.getLeft();
    assertEquals( "location", within.getProperty().getValue() );
    assertEquals( 20000, within.getDistance().getFloatValue(), 0 );
    assertEquals( 37f, within.getLatitude().getFloatValue(), 0 );
    assertEquals( -75f, within.getLongitude().getFloatValue(), 0 );
}
public void test2InsertMiddleIndex() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // Two inserts at the same index: the later insert lands closest to the token.
    rewriter.insertBefore(1, "x");
    rewriter.insertBefore(1, "y");
    assertEquals("ayxbc", rewriter.toString());
}
public void testReplaceRangeThenInsertAfterRightEdge() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abcccba");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // Replace tokens 2..4, then insert just after the replaced range's right edge.
    rewriter.replace(2, 4, "x");
    rewriter.insertAfter(4, "y");
    assertEquals("abxyba", rewriter.toString());
}
/**
 * Parses the given expression text into a {@link LogicalExpression}.
 *
 * @param expr the textual expression to parse
 * @return the logical expression produced by the parser
 * @throws RecognitionException if the input cannot be parsed
 * @throws IOException declared for caller compatibility
 */
private LogicalExpression parseExpression(String expr) throws RecognitionException, IOException{
    ExprLexer lexer = new ExprLexer(new ANTLRStringStream(expr));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    ExprParser parser = new ExprParser(tokens);
    parse_return ret = parser.parse();
    return ret.e;
}
public void testInsertThenReplaceSameIndex() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    rewriter.insertBefore(0, "0");
    rewriter.replace(0, "x"); // supercedes insert at 0
    assertEquals("xbc", rewriter.toString());
}
public void test2ReplaceMiddleIndex() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // Two replaces at the same index: only the later one takes effect.
    rewriter.replace(1, "x");
    rewriter.replace(1, "y");
    assertEquals("ayc", rewriter.toString());
}
/**
 * Creates content-assist contexts for the last complete node.
 * When the current node has a non-empty prefix that differs from its full
 * text, the prefix is re-lexed first as a sanity check; if lexing yields
 * nothing or produces an error token, no contexts are created.
 *
 * @param previousModel the semantic model element preceding the completion offset
 * @param strict whether follow-element computation should be strict
 * @throws BadLocationException if the completion offset is invalid for the document
 */
protected void createContextsForLastCompleteNode(EObject previousModel, boolean strict) throws BadLocationException {
    String currentNodePrefix = getPrefix(currentNode);
    if (!Strings.isEmpty(currentNodePrefix) && !currentNode.getText().equals(currentNodePrefix)) {
        lexer.setCharStream(new ANTLRStringStream(currentNodePrefix));
        Token token = lexer.nextToken();
        if (token == Token.EOF_TOKEN) { // error case - nothing could be parsed
            return;
        }
        while (token != Token.EOF_TOKEN) {
            if (isErrorToken(token)) {
                return;
            }
            token = lexer.nextToken();
        }
    }
    String prefix = "";
    String completeInput = getInputToParse(completionOffset);
    Collection<FollowElement> followElements = parser.getFollowElements(completeInput, strict);
    doCreateContexts(lastCompleteNode, currentNode, prefix, previousModel, followElements);
}
@Override
protected void createContextsForLastCompleteNode(EObject previousModel, boolean strict)
throws BadLocationException {
// Re-lex the current node's prefix as a sanity check before computing contexts;
// this only applies when the prefix is non-empty and shorter than the node text.
String currentNodePrefix = getPrefix(currentNode);
if (!Strings.isEmpty(currentNodePrefix) && !currentNode.getText().equals(currentNodePrefix)) {
lexer.setCharStream(new ANTLRStringStream(currentNodePrefix));
Token token = lexer.nextToken();
// Nothing could be lexed from the prefix: bail out without creating contexts.
if (token == Token.EOF_TOKEN) {
return;
}
// Any error token in the prefix also aborts context creation.
while (token != Token.EOF_TOKEN) {
if (isErrorToken(token)) {
return;
}
token = lexer.nextToken();
}
}
String prefix = "";
// Delegate follow-element computation for the completion offset, then build contexts.
Collection<FollowElement> followElements = parseFollowElements(completionOffset, strict);
doCreateContexts(lastCompleteNode, currentNode, prefix, previousModel, followElements);
}
/**
 * Parses the given dependency specification and generates dependencies for
 * the supplied task.
 *
 * @param text the dependency specification to parse
 * @param task the task the generated dependencies are attached to
 * @throws ParserException if the text cannot be parsed or any other failure occurs
 */
public void generateDependencies(String text, EGTask task) throws Exception {
    this.task = task;
    try {
        DependenciesLexer lex = new DependenciesLexer(new ANTLRStringStream(text));
        CommonTokenStream tokens = new CommonTokenStream(lex);
        DependenciesParser g = new DependenciesParser(tokens);
        g.setGenerator(this);
        g.prog();
    } catch (RecognitionException ex) {
        // Grammar-level failure: report and wrap exactly once.
        logger.error("Unable to load mapping task: " + ex.getMessage());
        throw new ParserException(ex);
    } catch (Exception e) {
        // Unexpected failure: log through the logger (with stack trace) instead of
        // printStackTrace(). Previously the ParserException thrown above was
        // re-caught by this broad handler and wrapped a second time.
        logger.error(e.getLocalizedMessage(), e);
        throw new ParserException(e);
    }
}
@Test
public void testRemoveErrorListener() throws Exception
{
    // Two listeners registered, the second removed before parsing:
    // only the first should observe the syntax errors.
    SyntaxErrorCounter keptListener = new SyntaxErrorCounter();
    SyntaxErrorCounter removedListener = new SyntaxErrorCounter();
    CharStream stream = new ANTLRStringStream("SELECT * FORM test;");
    CqlParser parser = new CqlParser(new CommonTokenStream(new CqlLexer(stream)));
    parser.addErrorListener(keptListener);
    parser.addErrorListener(removedListener);
    parser.removeErrorListener(removedListener);
    parser.query();
    assertTrue(keptListener.count > 0);
    assertEquals(0, removedListener.count);
}
@Override
public void load() {
    // Idempotent: the in-memory group text is parsed at most once.
    if ( alreadyLoaded ) return;
    alreadyLoaded = true;
    try {
        ANTLRStringStream stream = new ANTLRStringStream(text);
        stream.name = sourceName; // errors should cite the logical source name
        GroupLexer groupLexer = new GroupLexer(stream);
        GroupParser groupParser = new GroupParser(new CommonTokenStream(groupLexer));
        // no prefix since this group file is the entire group, nothing lives
        // beneath it.
        groupParser.group(this, "/");
    }
    catch (Exception e) {
        errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
    }
}
@Test
public void testAddErrorListener() throws Exception
{
    // Both registered listeners should observe the syntax errors.
    SyntaxErrorCounter listenerOne = new SyntaxErrorCounter();
    SyntaxErrorCounter listenerTwo = new SyntaxErrorCounter();
    CharStream stream = new ANTLRStringStream("SELECT * FORM test;");
    CqlParser parser = new CqlParser(new CommonTokenStream(new CqlLexer(stream)));
    parser.addErrorListener(listenerOne);
    parser.addErrorListener(listenerTwo);
    parser.query();
    // ANTLR 3.5 reports 2 errors in the sentence above (missing FROM and missing EOF).
    assertTrue(listenerOne.count > 0);
    assertTrue(listenerTwo.count > 0);
}
@Override
public void load() {
// Idempotent: the in-memory group text is parsed at most once.
if ( alreadyLoaded ) return;
alreadyLoaded = true;
GroupParser parser;
try {
ANTLRStringStream fs = new ANTLRStringStream(text);
// Label the stream so lexer/parser errors cite the logical source name.
fs.name = sourceName;
GroupLexer lexer = new GroupLexer(fs);
CommonTokenStream tokens = new CommonTokenStream(lexer);
parser = new GroupParser(tokens);
// no prefix since this group file is the entire group, nothing lives
// beneath it.
parser.group(this, "/");
}
catch (Exception e) {
// Failures are routed to the error manager rather than propagated.
errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
}
}
@Override
public void load() {
    if ( alreadyLoaded ) return; // parse the group text only once
    alreadyLoaded = true;
    try {
        ANTLRStringStream input = new ANTLRStringStream(text);
        // Name the stream so error messages reference the logical source.
        input.name = sourceName;
        CommonTokenStream tokenStream = new CommonTokenStream(new GroupLexer(input));
        GroupParser groupParser = new GroupParser(tokenStream);
        // no prefix since this group file is the entire group, nothing lives
        // beneath it.
        groupParser.group(this, "/");
    }
    catch (Exception e) {
        errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
    }
}
public void testInsertThenReplaceLastIndex() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // A replace at an index supersedes an earlier insert at that index.
    rewriter.insertBefore(2, "y");
    rewriter.replace(2, "x");
    assertEquals("abx", rewriter.toString());
}
public void testReplaceSingleMiddleThenOverlappingSuperset() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abcba");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // The superset replace (0..3) absorbs the nested single-token replace.
    rewriter.replace(2, 2, "xyz");
    rewriter.replace(0, 3, "foo");
    assertEquals("fooa", rewriter.toString());
}
@Override
public void load() {
// Idempotent: the in-memory group text is parsed at most once.
if ( alreadyLoaded ) return;
alreadyLoaded = true;
GroupParser parser;
try {
ANTLRStringStream fs = new ANTLRStringStream(text);
// Label the stream so lexer/parser errors cite the logical source name.
fs.name = sourceName;
GroupLexer lexer = new GroupLexer(fs);
CommonTokenStream tokens = new CommonTokenStream(lexer);
parser = new GroupParser(tokens);
// no prefix since this group file is the entire group, nothing lives
// beneath it.
parser.group(this, "/");
}
catch (Exception e) {
// Failures are routed to the error manager rather than propagated.
errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
}
}
public void testReplaceThenReplaceLowerIndexedSuperset() throws Exception {
// Lexer grammar producing one token per character: a, b, c.
Grammar g = new Grammar(
"lexer grammar t;\n"+
"A : 'a';\n" +
"B : 'b';\n" +
"C : 'c';\n");
CharStream input = new ANTLRStringStream("abcccba");
Interpreter lexEngine = new Interpreter(g, input);
TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
tokens.LT(1); // fill buffer
// A second replace that only partially overlaps the first is illegal;
// the error surfaces when the rewritten text is rendered.
tokens.replace(2, 4, "xyz");
tokens.replace(1, 3, "foo"); // overlap, error
Exception exc = null;
try {
tokens.toString();
}
catch (IllegalArgumentException iae) {
exc = iae;
}
// NOTE(review): the "[email protected]" fragments below look like email-obfuscation
// residue from web scraping that clobbered the original "<ReplaceOp@...>"
// descriptors — restore the literal from the upstream ANTLR test source.
String expecting = "replace op boundaries of <[email protected]:\"foo\"> overlap with previous <[email protected]:\"xyz\">";
assertNotNull(exc);
assertEquals(expecting, exc.getMessage());
}
@Override
public void load() {
    // Nothing to do if this group was already parsed.
    if ( alreadyLoaded ) return;
    alreadyLoaded = true;
    try {
        ANTLRStringStream charStream = new ANTLRStringStream(text);
        charStream.name = sourceName; // so diagnostics name the logical source
        GroupLexer groupLexer = new GroupLexer(charStream);
        GroupParser groupParser = new GroupParser(new CommonTokenStream(groupLexer));
        // no prefix since this group file is the entire group, nothing lives
        // beneath it.
        groupParser.group(this, "/");
    }
    catch (Exception e) {
        errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
    }
}
@Override
public void load() {
// Idempotent: the in-memory group text is parsed at most once.
if ( alreadyLoaded ) return;
alreadyLoaded = true;
GroupParser parser;
try {
ANTLRStringStream fs = new ANTLRStringStream(text);
// Label the stream so lexer/parser errors cite the logical source name.
fs.name = sourceName;
GroupLexer lexer = new GroupLexer(fs);
CommonTokenStream tokens = new CommonTokenStream(lexer);
parser = new GroupParser(tokens);
// no prefix since this group file is the entire group, nothing lives
// beneath it.
parser.group(this, "/");
}
catch (Exception e) {
// Failures are routed to the error manager rather than propagated.
errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
}
}
@Override
public void load() {
    if ( alreadyLoaded ) return; // guard: load at most once
    alreadyLoaded = true;
    try {
        ANTLRStringStream source = new ANTLRStringStream(text);
        // Give the stream the logical source name for error reporting.
        source.name = sourceName;
        GroupLexer groupLexer = new GroupLexer(source);
        CommonTokenStream tokenStream = new CommonTokenStream(groupLexer);
        GroupParser groupParser = new GroupParser(tokenStream);
        // no prefix since this group file is the entire group, nothing lives
        // beneath it.
        groupParser.group(this, "/");
    }
    catch (Exception e) {
        errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
    }
}
/**
 * Parses a chunk of evidence text and loads it into the current MLN.
 * All parsing components are reset afterwards so the method can be called
 * repeatedly with successive chunks.
 *
 * @param chunk      evidence text to parse
 * @param lineOffset line offset of this chunk within the whole input, so
 *                   reported line numbers match the original file
 */
public void parseEvidenceString(String chunk, long lineOffset){
    ANTLRStringStream input = new ANTLRStringStream(chunk);
    MLNLexer lexer = new MLNLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    MLNParser parser = new MLNParser(tokens);
    parser.lineOffset = lineOffset;
    parser.ml = this.mln;
    try {
        parser.evidenceList();
        // Reset every stage so no stale state leaks into the next chunk.
        parser.reset();
        tokens.reset();
        lexer.reset();
        input.reset();
        parser.ml = null; // drop the back-reference to the MLN
    } catch (Exception e) {
        mln.closeFiles();
        ExceptionMan.handle(e);
    }
}
@Override
public void load() {
// Idempotent: the in-memory group text is parsed at most once.
if ( alreadyLoaded ) return;
alreadyLoaded = true;
GroupParser parser;
try {
ANTLRStringStream fs = new ANTLRStringStream(text);
// Label the stream so lexer/parser errors cite the logical source name.
fs.name = sourceName;
GroupLexer lexer = new GroupLexer(fs);
CommonTokenStream tokens = new CommonTokenStream(lexer);
parser = new GroupParser(tokens);
// no prefix since this group file is the entire group, nothing lives
// beneath it.
parser.group(this, "/");
}
catch (Exception e) {
// Failures are routed to the error manager rather than propagated.
errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
}
}
@Test
public void selectGeo() throws RecognitionException {
    String queryString = "select * where a within .1 of -40.343666, 175.630917";
    // Build the filter-parser pipeline over the query text.
    CpQueryFilterLexer filterLexer =
        new CpQueryFilterLexer( new ANTLRStringStream( queryString ) );
    CpQueryFilterParser filterParser =
        new CpQueryFilterParser( new TokenRewriteStream( filterLexer ) );
    ParsedQuery parsed = filterParser.ql().parsedQuery;
    // The lone "within" clause becomes the root operand.
    WithinOperand within = ( WithinOperand ) parsed.getRootOperand();
    assertEquals( "a", within.getProperty().getValue() );
    assertEquals( .1f, within.getDistance().getFloatValue(), 0 );
    assertEquals( -40.343666f, within.getLatitude().getFloatValue(), 0 );
    assertEquals( 175.630917f, within.getLongitude().getFloatValue(), 0 );
}
public void testDisjointInserts() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // Inserts at distinct indices render in token order, not issue order.
    rewriter.insertBefore(1, "x");
    rewriter.insertBefore(2, "y");
    rewriter.insertBefore(0, "z");
    assertEquals("zaxbyc", rewriter.toString());
}
@Override
public void load() {
    // Already parsed? Then this is a no-op.
    if (alreadyLoaded) return;
    alreadyLoaded = true;
    try {
        ANTLRStringStream stream = new ANTLRStringStream(text);
        stream.name = sourceName; // errors should cite the logical source name
        CommonTokenStream tokenStream = new CommonTokenStream(new GroupLexer(stream));
        GroupParser groupParser = new GroupParser(tokenStream);
        // no prefix since this group file is the entire group, nothing lives
        // beneath it.
        groupParser.group(this, "/");
    }
    catch (Exception e) {
        errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, "<string>");
    }
}
public void testOverlappingReplace3() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abcc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    rewriter.replace(1, 2, "foo");
    rewriter.replace(0, 2, "bar"); // wipes prior nested replace
    assertEquals("barc", rewriter.toString());
}
public void testCombine3Inserts() throws Exception {
    // Lexer grammar producing one token per character: a, b, c.
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream chars = new ANTLRStringStream("abc");
    Interpreter interp = new Interpreter(grammar, chars);
    TokenRewriteStream rewriter = new TokenRewriteStream(interp);
    rewriter.LT(1); // force the token buffer to fill
    // Two inserts at index 1 plus one at index 0; later inserts at the
    // same index land closest to the token.
    rewriter.insertBefore(1, "x");
    rewriter.insertBefore(0, "y");
    rewriter.insertBefore(1, "z");
    assertEquals("yazxbc", rewriter.toString());
}