Mercurial > hg > Members > kono > PLparser
view src/plparser/PropertyListStreamTokenizer.java @ 11:79d492bce828
clean up
author | one |
---|---|
date | Thu, 02 Sep 2010 11:55:56 +0900 |
parents | 29e309b2f624 |
children |
line wrap: on
line source
package plparser;

import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StreamTokenizer;
import java.io.StringReader;

/**
 * Property-list scanner backed by {@link java.io.StreamTokenizer}.
 *
 * <p>This tokenizer is simple to implement, but apparently cannot handle
 * multi-line {@code ""} / {@code ''} quoted strings (a limitation of
 * {@code StreamTokenizer}'s quote handling).
 *
 * @author kono
 *
 * @param <T> value type carried by {@link Token}
 */
public class PropertyListStreamTokenizer<T> extends PLScannerImpl<T>
        implements PLScanner<T> {

    /** Underlying tokenizer; (re)created by {@code set(...)} / {@code setFile(...)}. */
    private StreamTokenizer tokenizer;

    public final static char QUOTE = '\'';
    public final static char DOUBLE_QUOTE = '"';

    public PropertyListStreamTokenizer(
            PLScanner<T> s, Dictionary<T> dict, Token<T> nullToken) {
        super(s, dict, nullToken);
    }

    /**
     * Configures the tokenizer's syntax tables for property-list input:
     * alphanumerics and {@code _} form words, the structural characters
     * {@code = {} () ; ,} are ordinary (returned one at a time), both quote
     * styles are recognized, and C / C++ comments are skipped.
     *
     * <p>Idempotent; automatically invoked by {@code set(...)} /
     * {@code setFile(...)} once a tokenizer exists.
     */
    public void init() {
        tokenizer.resetSyntax();
        tokenizer.wordChars('0', '9');
        tokenizer.wordChars('a', 'z');
        tokenizer.wordChars('A', 'Z');
        tokenizer.wordChars('_', '_');
        tokenizer.ordinaryChar('=');
        tokenizer.ordinaryChar('{');
        tokenizer.ordinaryChar('}');
        tokenizer.ordinaryChar('(');
        tokenizer.ordinaryChar(')');
        tokenizer.ordinaryChar(';');
        tokenizer.ordinaryChar(',');
        tokenizer.whitespaceChars(' ', ' ');
        tokenizer.whitespaceChars('\t', '\t');
        tokenizer.whitespaceChars('\n', '\n');
        tokenizer.whitespaceChars('\r', '\r');
        tokenizer.quoteChar(QUOTE);
        tokenizer.quoteChar(DOUBLE_QUOTE);
        tokenizer.parseNumbers();
        tokenizer.eolIsSignificant(false);
        tokenizer.slashStarComments(true);
        tokenizer.slashSlashComments(true);
    }

    /**
     * Reads the next token, interning word tokens through {@code dict} so
     * identical words share one {@link Token} instance.
     *
     * @return the next token, or {@code nullToken} on EOF or I/O error
     */
    @Override
    public Token<T> nextToken() {
        int token;
        nextToken = nullToken;
        lineno = tokenizer.lineno();
        try {
            token = tokenizer.nextToken();
            switch (token) {
            case StreamTokenizer.TT_EOF:
                return nextToken;
            case StreamTokenizer.TT_NUMBER:
                // BUG FIX: numeric tokens are delivered in nval, not sval
                // (sval is null for TT_NUMBER per the StreamTokenizer API).
                return nextToken = new Token<T>(numberText(tokenizer.nval), TokenID.NUMBER);
            case StreamTokenizer.TT_WORD: {
                String s = tokenizer.sval;
                Token<T> t;
                if ((t = dict.get(s)) == null) {
                    dict.put(s, t = new Token<T>(s, TokenID.Any));
                }
                return nextToken = t;
            }
            case QUOTE:
            case DOUBLE_QUOTE:
                // sval holds the quoted body (quotes stripped).
                return nextToken = new Token<T>(tokenizer.sval, TokenID.VARIABLE);
            case StreamTokenizer.TT_EOL:
                // Only reachable if eolIsSignificant(true) is set externally;
                // echo the interactive prompt and keep scanning.
                if (prompt != null) System.out.print(prompt);
                return nextToken();
            default:
                // BUG FIX: for ordinary characters ('=', '{', ...) sval is
                // null; the character itself is the returned token value.
                return nextToken = new Token<T>(String.valueOf((char) token), TokenID.Any);
            }
        } catch (IOException e) {
            return nullToken;
        }
    }

    /** Renders a StreamTokenizer numeric value, without ".0" for integral values. */
    private static String numberText(double n) {
        long l = (long) n;
        return (l == n) ? Long.toString(l) : Double.toString(n);
    }

    /**
     * Returns whether more input remains, without disturbing the token stream.
     */
    @Override
    public boolean hasRemaining() {
        try {
            int token = tokenizer.nextToken();
            // BUG FIX: push the peeked token back so the next nextToken()
            // call still sees it; previously this silently consumed a token.
            tokenizer.pushBack();
            return token != StreamTokenizer.TT_EOF;
        } catch (IOException e) {
            return false;
        }
    }

    @Override
    public PLScanner<T> pushScannerFile(InputStream newfile, String prompt) {
        return new PropertyListStreamTokenizer<T>(this, dict, nullToken)
                .setFile(newfile, prompt);
    }

    @Override
    public PLScanner<T> pushScanner(String exp) {
        return new PropertyListStreamTokenizer<T>(this, dict, nullToken).set(exp);
    }

    @Override
    public PLScanner<T> pushScannerFile(String file) throws FileNotFoundException {
        return new PropertyListStreamTokenizer<T>(this, dict, nullToken).setFile(file);
    }

    @Override
    public PLScanner<T> set(String exp) {
        Reader reader = new StringReader(exp);
        tokenizer = new StreamTokenizer(reader);
        init(); // BUG FIX: syntax tables were never applied to the new tokenizer
        return this;
    }

    @Override
    public PLScanner<T> setFile(String file) throws FileNotFoundException {
        // NOTE(review): FileReader uses the platform default charset — confirm
        // whether input files are guaranteed to be in that encoding.
        Reader reader = new FileReader(file);
        tokenizer = new StreamTokenizer(reader);
        init(); // BUG FIX: syntax tables were never applied to the new tokenizer
        return this;
    }

    @Override
    public PLScanner<T> set(InputStreamReader reader) {
        tokenizer = new StreamTokenizer(reader);
        init(); // BUG FIX: syntax tables were never applied to the new tokenizer
        return this;
    }
}