Below are example usages of java.io.StreamTokenizer#slashStarComments(); follow the link to view the source on GitHub, or leave a comment in the panel on the right.
/**
 * Tokenizes {@code reader} and drives the parser state machine until a
 * state returns {@code null}, signalling end of input.
 *
 * @param reader source text to parse
 * @throws ParseException if a state rejects the token stream
 * @throws IOException    if the underlying reader fails
 */
private void parse(Reader reader) throws ParseException, IOException
{
    StreamTokenizer tokenizer = new StreamTokenizer(reader);
    tokenizer.eolIsSignificant(true);
    // '_' may appear inside identifiers.
    tokenizer.wordChars('_', '_');
    tokenizer.parseNumbers();
    tokenizer.quoteChar('"');
    // These calls caused comments to be discarded
    tokenizer.slashSlashComments(true);
    tokenizer.slashStarComments(true);
    // Run the state machine; each state consumes tokens and hands back
    // the next state (or null when the input is exhausted).
    for (ParserState state = this.getBeginningOfLineState(); state != null; )
    {
        state = state.parse(tokenizer);
    }
}
/**
 * Parses a JAAS configuration string into its login-module entries.
 *
 * @param loginContextName name this configuration is registered under
 * @param jaasConfigParams raw JAAS config text to tokenize
 * @throws IllegalArgumentException if no login module is present or the
 *         text cannot be read
 */
public JaasConfiguration(String loginContextName, String jaasConfigParams) {
    StreamTokenizer st = new StreamTokenizer(new StringReader(jaasConfigParams));
    // Skip both Java comment styles inside the config text.
    st.slashSlashComments(true);
    st.slashStarComments(true);
    // '-', '_' and '$' may occur inside option names/values.
    st.wordChars('-', '-');
    st.wordChars('_', '_');
    st.wordChars('$', '$');
    configEntries = new ArrayList<>();
    try {
        while (st.nextToken() != StreamTokenizer.TT_EOF) {
            configEntries.add(parseAppConfigurationEntry(st));
        }
    } catch (IOException e) {
        throw new IllegalArgumentException("Unexpected exception while parsing JAAS config", e);
    }
    if (configEntries.isEmpty()) {
        throw new IllegalArgumentException("Login module not specified in JAAS config");
    }
    this.loginContextName = loginContextName;
}
/**
 * Builds the lexer's tokenizer over {@code r}: printable ASCII forms words,
 * NUL through space is whitespace, and three comment styles (slash-slash,
 * slash-star, and '#' to end of line) are skipped.
 * NOTE: the order of the calls below matters — later calls override the
 * character classes set by earlier ones.
 */
public Lexer(Reader r) {
input = new StreamTokenizer(r);
// Drop the default syntax table; only the rules below apply.
input.resetSyntax();
// Every printable ASCII character ('!' .. '~') may appear in a word.
input.wordChars('!', '~');
// Demote '/' from word char so the slash-comment flags can recognize it.
input.ordinaryChar('/');
// NUL through space (includes \t, \r, \n) separate tokens.
input.whitespaceChars('\u0000', ' ');
input.slashSlashComments(true);
input.slashStarComments(true);
// Newlines are not reported as tokens.
input.eolIsSignificant(false);
// '#' also starts a to-end-of-line comment.
input.commentChar('#');
}
/**
 * Builds the lexer's tokenizer over {@code r}.
 * Words are ASCII letters plus ':' and '_'; both quote styles delimit
 * strings; Java-style comments are skipped; all grammar punctuation is
 * returned one character per token.
 * The punctuation constants (SINGLE_QUOTE, BEGIN_NODE_TYPE_NAME, ...) are
 * declared elsewhere in this class.
 *
 * @param r        source to tokenize
 * @param systemId identifier of the source, kept for diagnostics
 */
public Lexer(Reader r, String systemId) {
this.systemId = systemId;
st = new StreamTokenizer(r);
// Newlines are ordinary whitespace; token case is preserved.
st.eolIsSignificant(false);
st.lowerCaseMode(false);
// Skip both Java comment styles entirely.
st.slashSlashComments(true);
st.slashStarComments(true);
// Identifier characters: ASCII letters plus ':' and '_'.
st.wordChars('a', 'z');
st.wordChars('A', 'Z');
st.wordChars(':', ':');
st.wordChars('_', '_');
st.quoteChar(SINGLE_QUOTE);
st.quoteChar(DOUBLE_QUOTE);
// Grammar punctuation becomes single-character tokens.
// NOTE(review): if any of these constants falls in a range configured
// above, this later call overrides it — order is significant.
st.ordinaryChar(BEGIN_NODE_TYPE_NAME);
st.ordinaryChar(END_NODE_TYPE_NAME);
st.ordinaryChar(EXTENDS);
st.ordinaryChar(LIST_DELIMITER);
st.ordinaryChar(PROPERTY_DEFINITION);
st.ordinaryChar(CHILD_NODE_DEFINITION);
st.ordinaryChar(BEGIN_TYPE);
st.ordinaryChar(END_TYPE);
st.ordinaryChar(DEFAULT);
st.ordinaryChar(CONSTRAINT);
}
/**
 * Extract the name of a Java source's package, or null if not found. This method is only used
 * before javac parsing to determine the main type name.
 *
 * <p>Comments are skipped by the tokenizer, so a "package" inside a comment is ignored.
 * The string reader cannot actually throw, so any {@link IOException} is a logic error.
 */
@VisibleForTesting
static String packageName(String source) {
    try (StringReader r = new StringReader(source)) {
        StreamTokenizer tokenizer = new StreamTokenizer(r);
        tokenizer.slashSlashComments(true);
        tokenizer.slashStarComments(true);
        StringBuilder sb = new StringBuilder();
        boolean inName = false;
        while (tokenizer.nextToken() != StreamTokenizer.TT_EOF) {
            if (inName) {
                switch (tokenizer.ttype) {
                    case ';':
                        // End of the package statement: report what was collected.
                        return sb.length() > 0 ? sb.toString() : null;
                    case '.':
                        sb.append('.');
                        break;
                    case StreamTokenizer.TT_WORD:
                        sb.append(tokenizer.sval);
                        break;
                    default:
                        inName = false; // Invalid package statement pattern.
                        break;
                }
            } else if (tokenizer.ttype == StreamTokenizer.TT_WORD && tokenizer.sval.equals("package")) {
                // Start collecting a (new) package name. Resetting here keeps
                // characters from an earlier, abandoned "package" fragment from
                // leaking into this one.
                sb.setLength(0);
                inName = true;
            }
        }
        return null; // Package statement not found.
    } catch (IOException e) {
        throw new AssertionError("Exception reading string: " + e);
    }
}
/**
 * Applies the shared tokenizer setup: both Java comment styles are
 * skipped, and '_' is allowed inside word tokens.
 */
private static void configureTokenizer(StreamTokenizer tokenizer) {
    tokenizer.slashSlashComments(true);
    tokenizer.slashStarComments(true);
    tokenizer.wordChars('_', '_');
}
/**
 * Parse a filter definition string. The format is as follows:<br>
 * "Exact words" "including:colons" id:100 name:fighter*
 * @param filterStr the filter string
 * @return the list of fields and patterns to check; a pair with an empty
 *         field name matches against any field
 */
public static List<Pair<String, Pattern>> parseFilter(String filterStr) {
    List<Pair<String, Pattern>> result = new ArrayList<>();
    StreamTokenizer st = new StreamTokenizer(new StringReader(filterStr));
    // Filter text is not Java source: treat "//" and "/*" literally.
    st.slashSlashComments(false);
    st.slashStarComments(false);
    st.lowerCaseMode(true);
    // Digits, '.' and '-' are part of filter words like "id:100", not
    // numbers: clear the number-parsing attributes installed by default,
    // otherwise numeric tokens degrade to a bogus (char) TT_NUMBER token.
    st.ordinaryChars('0', '9');
    st.ordinaryChar('.');
    st.ordinaryChar('-');
    st.wordChars('0', '9');
    // Wildcards and identifier punctuation stay inside a single word token.
    st.wordChars('*', '*');
    st.wordChars('?', '?');
    st.wordChars('.', '.');
    st.wordChars('@', '@');
    st.wordChars('-', '-');
    st.wordChars('_', '_');
    st.quoteChar('"');
    List<String> tokens = new ArrayList<>();
    try {
        while (true) {
            int tok = st.nextToken();
            if (tok == StreamTokenizer.TT_EOF) {
                break;
            } else if (tok == StreamTokenizer.TT_WORD || tok == '"') {
                tokens.add(st.sval);
            } else {
                // Ordinary chars (e.g. ':') become one-character tokens.
                tokens.add(String.valueOf((char) tok));
            }
        }
    } catch (IOException ignored) {
        // Reading from a StringReader cannot fail; treat as end of input.
    }
    // Group "field : pattern" triples; bare tokens match any field.
    for (int i = 0; i < tokens.size(); i++) {
        String key = tokens.get(i);
        if (i < tokens.size() - 1 && tokens.get(i + 1).equals(":")) {
            if (i < tokens.size() - 2) {
                result.add(Pair.of(key, wildcardToRegex(tokens.get(i + 2))));
                i += 2;
            } else {
                // Trailing "field:" with no pattern: match the empty string,
                // and consume the colon so it is not re-read as a bare token.
                result.add(Pair.of(key, wildcardToRegex("")));
                i += 1;
            }
        } else {
            result.add(Pair.of("", wildcardToRegex(key)));
        }
    }
    return result;
}
// Validates StreamTokenizer's default tokenization, line counting,
// toString() formatting, and the comment/case/ordinary-char options,
// plus two Harmony regression cases using the InputStream constructor.
public void testStreamTokenizer() throws Exception {
String str = "Testing 12345 \n alpha \r\n omega";
String strb = "-3.8 'BLIND mice' \r sEe /* how */ they run";
StringReader aa = new StringReader(str);
StringReader ba = new StringReader(strb);
StreamTokenizer a = new StreamTokenizer(aa);
StreamTokenizer b = new StreamTokenizer(ba);
// Default syntax: line numbers start at 1 before any token is read.
assertEquals(1, a.lineno());
assertEquals(StreamTokenizer.TT_WORD, a.nextToken());
assertEquals("Token[Testing], line 1", a.toString());
assertEquals(StreamTokenizer.TT_NUMBER, a.nextToken());
assertEquals("Token[n=12345.0], line 1", a.toString());
assertEquals(StreamTokenizer.TT_WORD, a.nextToken());
assertEquals("Token[alpha], line 2", a.toString());
assertEquals(StreamTokenizer.TT_WORD, a.nextToken());
assertEquals("Token[omega], line 3", a.toString());
assertEquals(StreamTokenizer.TT_EOF, a.nextToken());
assertEquals("Token[EOF], line 3", a.toString());
// Reconfigure b: 'u' starts a comment, EOL is reported as a token,
// words are lower-cased, 'y' is ordinary, slash-star comments skipped.
b.commentChar('u');
b.eolIsSignificant(true);
b.lowerCaseMode(true);
b.ordinaryChar('y');
b.slashStarComments(true);
assertEquals(StreamTokenizer.TT_NUMBER, b.nextToken());
assertEquals(-3.8, b.nval);
assertEquals("Token[n=-3.8], line 1", b.toString());
assertEquals(39, b.nextToken()); // '
assertEquals("Token[BLIND mice], line 1", b.toString());
assertEquals(10, b.nextToken()); // \n
assertEquals("Token[EOL], line 2", b.toString());
assertEquals(StreamTokenizer.TT_WORD, b.nextToken());
assertEquals("Token[see], line 2", b.toString());
// "they": 'y' is ordinary, so the word stops at "the".
assertEquals(StreamTokenizer.TT_WORD, b.nextToken());
assertEquals("Token[the], line 2", b.toString());
assertEquals(121, b.nextToken()); // y
assertEquals("Token['y'], line 2", b.toString());
// "run": 'r' is a word; 'u' starts a comment that eats the rest.
assertEquals(StreamTokenizer.TT_WORD, b.nextToken());
assertEquals("Token[r], line 2", b.toString());
assertEquals(StreamTokenizer.TT_EOF, b.nextToken());
assertEquals("Token[EOF], line 2", b.toString());
// A harmony regression test
byte[] data = new byte[]{(byte) '-'};
StreamTokenizer tokenizer = new StreamTokenizer(new ByteArrayInputStream(data));
tokenizer.nextToken();
String result = tokenizer.toString();
// A lone '-' is an ordinary-char token, not the start of a number.
assertEquals("Token['-'], line 1", result);
// another harmony regression test
byte[] data2 = new byte[]{(byte) '"',
(byte) 'H',
(byte) 'e',
(byte) 'l',
(byte) 'l',
(byte) 'o',
(byte) '"'};
StreamTokenizer tokenizer2 = new StreamTokenizer(new ByteArrayInputStream(data2));
tokenizer2.nextToken();
result = tokenizer2.toString();
// Quoted strings print without their delimiters.
assertEquals("Token[Hello], line 1", result);
}
/**
 * Exercises StreamTokenizer's basic contract: default word/number/quote
 * tokenization with line counting on stream {@code a}, and the
 * comment-char, EOL, lower-case and ordinary-char options on stream
 * {@code b}. Uses {@code assertEquals} instead of
 * {@code assertTrue(x == y)} so failures report expected vs. actual.
 */
public void test_basicStringTokenizerMethods() throws IOException {
    String str = "Testing 12345 \n alpha \r\n omega";
    String strb = "-3.8 'BLIND mice' \r sEe /* how */ they run";
    StringReader aa = new StringReader(str);
    StringReader ba = new StringReader(strb);
    StreamTokenizer a = new StreamTokenizer(aa);
    StreamTokenizer b = new StreamTokenizer(ba);
    // Line numbering starts at 1 before any token is read.
    Assert.assertEquals(1, a.lineno());
    Assert.assertEquals(StreamTokenizer.TT_WORD, a.nextToken());
    Assert.assertEquals("Token[Testing], line 1", a.toString());
    Assert.assertEquals(StreamTokenizer.TT_NUMBER, a.nextToken());
    Assert.assertEquals("Token[n=12345.0], line 1", a.toString());
    Assert.assertEquals(StreamTokenizer.TT_WORD, a.nextToken());
    Assert.assertEquals("Token[alpha], line 2", a.toString());
    Assert.assertEquals(StreamTokenizer.TT_WORD, a.nextToken());
    Assert.assertEquals("Token[omega], line 3", a.toString());
    Assert.assertEquals(StreamTokenizer.TT_EOF, a.nextToken());
    Assert.assertEquals("Token[EOF], line 3", a.toString());
    // 'u' starts a comment, EOL is significant, words are lower-cased,
    // 'y' is ordinary, and slash-star comments are skipped.
    b.commentChar('u');
    b.eolIsSignificant(true);
    b.lowerCaseMode(true);
    b.ordinaryChar('y');
    b.slashStarComments(true);
    Assert.assertEquals(StreamTokenizer.TT_NUMBER, b.nextToken());
    Assert.assertEquals(-3.8, b.nval, 0.0);
    Assert.assertEquals("Token[n=-3.8], line 1", b.toString());
    Assert.assertEquals(39, b.nextToken()); // '
    Assert.assertEquals("Token[BLIND mice], line 1", b.toString());
    Assert.assertEquals(10, b.nextToken()); // \n
    Assert.assertEquals("Token[EOL], line 2", b.toString());
    Assert.assertEquals(StreamTokenizer.TT_WORD, b.nextToken());
    Assert.assertEquals("Token[see], line 2", b.toString());
    // "they": 'y' is ordinary, so the word stops at "the".
    Assert.assertEquals(StreamTokenizer.TT_WORD, b.nextToken());
    Assert.assertEquals("Token[the], line 2", b.toString());
    Assert.assertEquals(121, b.nextToken()); // y
    Assert.assertEquals("Token['y'], line 2", b.toString());
    // "run": 'r' is a word; 'u' starts a comment that eats the rest.
    Assert.assertEquals(StreamTokenizer.TT_WORD, b.nextToken());
    Assert.assertEquals("Token[r], line 2", b.toString());
    Assert.assertEquals(StreamTokenizer.TT_EOF, b.nextToken());
    Assert.assertEquals("Token[EOF], line 2", b.toString());
}