Java Code Examples for java.io.StreamTokenizer#parseNumbers()

The following examples show how to use java.io.StreamTokenizer#parseNumbers(). They are taken from open-source projects; the source file, project, and license are noted above each example.
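Before the project examples, here is a minimal, self-contained sketch of what parseNumbers() does: numeric tokens come back as TT_NUMBER with their value in nval, while words come back as TT_WORD with their text in sval. The class name and input string below are illustrative only. Note that the StreamTokenizer constructor already enables number parsing by default, so the explicit call mainly matters when the syntax table has been reset with ordinaryChars().

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

public class ParseNumbersDemo
{
    public static void main(String[] args) throws IOException
    {
        StreamTokenizer st = new StreamTokenizer(new StringReader("width 3.5 height -2"));
        // Explicitly request numeric parsing (also part of the default syntax table)
        st.parseNumbers();

        while (st.nextToken() != StreamTokenizer.TT_EOF)
        {
            if (st.ttype == StreamTokenizer.TT_NUMBER)
            {
                System.out.println("number: " + st.nval);
            }
            else if (st.ttype == StreamTokenizer.TT_WORD)
            {
                System.out.println("word:   " + st.sval);
            }
        }
    }
}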
Example 1
Source File: ExpectedResultsParser.java    From tablasco with Apache License 2.0
private void parse(Reader reader) throws ParseException, IOException
{
    StreamTokenizer st = new StreamTokenizer(reader);
    st.eolIsSignificant(true);
    st.wordChars((int) '_', (int) '_');
    st.parseNumbers();
    st.quoteChar((int) '"');
    // Recognize // and /* */ comments so they are discarded
    st.slashSlashComments(true);
    st.slashStarComments(true);

    // Parse the file
    ParserState currentState = this.getBeginningOfLineState();
    while (currentState != null)
    {
        currentState = currentState.parse(st);
    }
}
 
Example 2
Source File: Parser.java    From hadoop with Apache License 2.0
Lexer(String s) {
  tok = new StreamTokenizer(new CharArrayReader(s.toCharArray()));
  tok.quoteChar('"');
  tok.parseNumbers();
  tok.ordinaryChar(',');
  tok.ordinaryChar('(');
  tok.ordinaryChar(')');
  tok.wordChars('$','$');
  tok.wordChars('_','_');
}
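The Lexer above only configures the tokenizer; the tokens themselves are pulled out elsewhere with nextToken(). The hedged sketch below (the input string and driver loop are illustrative, not part of Hadoop's Parser) shows what such a configuration yields: numbers in nval, words in sval, quoted strings reported under the quote character, and the punctuation marked ordinary reported as single-character tokens.

import java.io.CharArrayReader;
import java.io.IOException;
import java.io.StreamTokenizer;

class LexerDemo {
  public static void main(String[] args) throws IOException {
    String s = "load(\"part-00000\", 42)";
    StreamTokenizer tok = new StreamTokenizer(new CharArrayReader(s.toCharArray()));
    tok.quoteChar('"');
    tok.parseNumbers();
    tok.ordinaryChar(',');
    tok.ordinaryChar('(');
    tok.ordinaryChar(')');
    tok.wordChars('$', '$');
    tok.wordChars('_', '_');

    while (tok.nextToken() != StreamTokenizer.TT_EOF) {
      switch (tok.ttype) {
        case StreamTokenizer.TT_NUMBER: System.out.println("NUM  " + tok.nval); break;
        case StreamTokenizer.TT_WORD:   System.out.println("WORD " + tok.sval); break;
        case '"':                       System.out.println("STR  " + tok.sval); break;
        default:                        System.out.println("CHAR " + (char) tok.ttype);
      }
    }
  }
}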
 
Example 3
Source File: Parser.java    From big-c with Apache License 2.0
Lexer(String s) {
  tok = new StreamTokenizer(new CharArrayReader(s.toCharArray()));
  tok.quoteChar('"');
  tok.parseNumbers();
  tok.ordinaryChar(',');
  tok.ordinaryChar('(');
  tok.ordinaryChar(')');
  tok.wordChars('$','$');
  tok.wordChars('_','_');
}
 
Example 4
Source File: FileSourceBase.java    From gama with GNU General Public License v3.0
/**
 * Override this method to configure the tokenizer behaviour. It is called each time a tokenizer is created (for the
 * parsed file and for all included files).
 */
protected void configureTokenizer(final StreamTokenizer tok) throws IOException {
	if (COMMENT_CHAR > 0) {
		tok.commentChar(COMMENT_CHAR);
	}
	tok.quoteChar(QUOTE_CHAR);
	tok.eolIsSignificant(eol_is_significant);
	tok.wordChars('_', '_');
	tok.parseNumbers();
}
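What this configuration produces can be seen with a small standalone sketch. The constants and flag below (COMMENT_CHAR as '#', QUOTE_CHAR as '"', eol_is_significant as false) are illustrative stand-ins for FileSourceBase's fields, and the input line is made up; the point is that '_' becomes part of word tokens, numbers arrive as TT_NUMBER, and everything after the comment character is dropped.

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

class ConfigureTokenizerDemo {
    // Illustrative stand-ins for the fields used by configureTokenizer() above
    static final int COMMENT_CHAR = '#';
    static final int QUOTE_CHAR = '"';
    static final boolean eol_is_significant = false;

    public static void main(final String[] args) throws IOException {
        final StreamTokenizer tok = new StreamTokenizer(new StringReader("node_1 2.5 # trailing comment"));
        if (COMMENT_CHAR > 0) {
            tok.commentChar(COMMENT_CHAR);
        }
        tok.quoteChar(QUOTE_CHAR);
        tok.eolIsSignificant(eol_is_significant);
        tok.wordChars('_', '_');
        tok.parseNumbers();

        while (tok.nextToken() != StreamTokenizer.TT_EOF) {
            if (tok.ttype == StreamTokenizer.TT_NUMBER) {
                System.out.println("number: " + tok.nval);
            } else {
                System.out.println("token:  " + tok.sval);
            }
        }
    }
}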
 
Example 5
Source File: Parser.java    From RDFS with Apache License 2.0
Lexer(String s) {
  tok = new StreamTokenizer(new CharArrayReader(s.toCharArray()));
  tok.quoteChar('"');
  tok.parseNumbers();
  tok.ordinaryChar(',');
  tok.ordinaryChar('(');
  tok.ordinaryChar(')');
  tok.wordChars('$','$');
  tok.wordChars('_','_');
}
 
Example 6
Source File: Parser.java    From hadoop-gpu with Apache License 2.0
Lexer(String s) {
  tok = new StreamTokenizer(new CharArrayReader(s.toCharArray()));
  tok.quoteChar('"');
  tok.parseNumbers();
  tok.ordinaryChar(',');
  tok.ordinaryChar('(');
  tok.ordinaryChar(')');
  tok.wordChars('$','$');
  tok.wordChars('_','_');
}
 
Example 7
Source File: CSVBuilder.java    From OpenForecast with GNU Lesser General Public License v2.1
/**
 * Builds a DataPoint from the given CSV line. This method should only be
 * used to parse a line that is expected to contain numeric data.
 * Use {@link #readHeaderRow} to read a header row if one is expected.
 * @param line the input line of comma separated values to parse and use
 * to construct a new DataPoint.
 * @return a DataPoint object with values as specified by the given input
 * String.
 */
private DataPoint build( String line )
    throws IOException
{
    Observation dataPoint = new Observation( 0.0 );
    
    StreamTokenizer tokenizer
        = new StreamTokenizer( new StringReader( line ) );
    
    tokenizer.commentChar( '#' );
    tokenizer.eolIsSignificant( true );
    tokenizer.parseNumbers();
    
    int i = 0;
    int n = getNumberOfVariables();
    int lastToken = SEPARATOR;
    do
        {
            // Read next token
            tokenizer.nextToken();
            switch ( tokenizer.ttype )
                {
                case '\t':
                case ' ':
                    // Skip whitespace
                    continue;
                    
                case SEPARATOR:
                    // Check for two adjacent commas
                    if ( lastToken != SEPARATOR )
                        break;
                    
                    // Two adjacent commas. Assume 0.0 between them
                    tokenizer.nval = 0.0;
                    
                    // Fall through, and handle as a number
                    
                case StreamTokenizer.TT_NUMBER:
                    // Handle numbers appropriately as data
                    
                    // If this is the last value on the line, treat it
                    //  as the dependent variable value
                    if ( i == n )
                        dataPoint.setDependentValue(tokenizer.nval);
                    else
                        dataPoint.setIndependentValue(getVariableName(i),
                                                      tokenizer.nval);
                    
                    i++;
                    break;
                    
                case StreamTokenizer.TT_WORD:
                    throw new IOException( "Invalid input in CSV file. Number expected, found '"+tokenizer.sval+"'");
                    
                case StreamTokenizer.TT_EOL:
                case StreamTokenizer.TT_EOF:
                    break;
                    
                default:
                    // Ignore any other token types
                    break;
                }
            
            lastToken = tokenizer.ttype;
        }
    while ( tokenizer.ttype != StreamTokenizer.TT_EOF );
    
    return dataPoint;
}
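The special handling of two adjacent separators above exists because StreamTokenizer reports an empty CSV field as two back-to-back separator tokens with no TT_NUMBER in between. The minimal standalone sketch below illustrates this; the input string is made up, and it assumes SEPARATOR in CSVBuilder is the comma character.

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;

class CsvTokenDemo
{
    public static void main( String[] args ) throws IOException
    {
        // "1.5,,3" contains an empty middle field: the tokenizer reports
        // number 1.5, then ',' twice in a row, then number 3.0
        StreamTokenizer tokenizer
            = new StreamTokenizer( new StringReader( "1.5,,3" ) );
        tokenizer.parseNumbers();

        while ( tokenizer.nextToken() != StreamTokenizer.TT_EOF )
        {
            if ( tokenizer.ttype == StreamTokenizer.TT_NUMBER )
                System.out.println( "number: " + tokenizer.nval );
            else
                System.out.println( "char:   " + (char) tokenizer.ttype );
        }
    }
}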