Java Code Examples for com.alibaba.druid.sql.parser.Token#LPAREN
The following examples show how to use com.alibaba.druid.sql.parser.Token#LPAREN. These examples are extracted from open source projects.
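All of the examples below rely on the same idiom: the parser inspects lexer.token() for Token.LPAREN, consumes it with nextToken(), parses whatever the parentheses enclose, and then requires the matching Token.RPAREN via accept(). The following minimal sketch illustrates that idiom in isolation; it is not taken from any of the projects below, and the class and method names (ParenExprParser, parseOptionalParenExpr) are hypothetical.

    import com.alibaba.druid.sql.ast.SQLExpr;
    import com.alibaba.druid.sql.parser.SQLExprParser;
    import com.alibaba.druid.sql.parser.Token;

    // Hypothetical parser subclass used only to illustrate the Token.LPAREN idiom.
    public class ParenExprParser extends SQLExprParser {

        public ParenExprParser(String sql) {
            super(sql);
        }

        // Parses an optional parenthesized expression, e.g. "(1 + 2)".
        public SQLExpr parseOptionalParenExpr() {
            if (lexer.token() == Token.LPAREN) {   // look ahead for '('
                lexer.nextToken();                 // consume '('
                SQLExpr inner = this.expr();       // parse the enclosed expression
                accept(Token.RPAREN);              // require the matching ')'
                return inner;
            }
            return this.expr();                    // no parentheses: parse as-is
        }
    }

For instance, new ParenExprParser("(1 + 2)").parseOptionalParenExpr() returns the SQLExpr for 1 + 2 after consuming both parentheses.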
Example 1
Source Project: elasticsearch-sql File: ElasticSqlExprParser.java License: Apache License 2.0
public SQLName userName() {
    SQLName name = this.name();

    // CURRENT_USER may be written as a call with an empty argument list: CURRENT_USER()
    if (lexer.token() == Token.LPAREN && name.hashCode64() == FnvHash.Constants.CURRENT_USER) {
        lexer.nextToken();
        accept(Token.RPAREN);
        return name;
    }

    return (SQLName) userNameRest(name);
}
Example 2
Source Project: Mycat2 File: MycatSelectParser.java License: GNU General Public License v3.0
public void parseTop() {
    if (lexer.token() == Token.TOP) {
        lexer.nextToken();

        // handle an optional parenthesized form of TOP: consume '(' and then require ')'
        boolean paren = false;
        if (lexer.token() == Token.LPAREN) {
            paren = true;
            lexer.nextToken();
        }
        if (paren) {
            accept(Token.RPAREN);
        }

        if (lexer.token() == Token.LITERAL_INT) {
            lexer.mark();
            lexer.nextToken();
        }
        if (lexer.token() == Token.IDENTIFIER) {
            lexer.nextToken();
        }
        if (lexer.token() == Token.EQ || lexer.token() == Token.DOT) {
            lexer.nextToken();
        } else if (lexer.token() != Token.STAR) {
            lexer.reset();
        }
        if (lexer.token() == Token.PERCENT) {
            lexer.nextToken();
        }
    }
}
Example 3
Source Project: Mycat2 File: MycatExprParser.java License: GNU General Public License v3.0
public void parseTop() {
    if (lexer.token() == Token.TOP) {
        lexer.nextToken();

        // handle an optional parenthesized form of TOP: consume '(' and then require ')'
        boolean paren = false;
        if (lexer.token() == Token.LPAREN) {
            paren = true;
            lexer.nextToken();
        }
        if (paren) {
            accept(Token.RPAREN);
        }

        if (lexer.token() == Token.LITERAL_INT) {
            lexer.mark();
            lexer.nextToken();
        }
        if (lexer.token() == Token.IDENTIFIER) {
            lexer.nextToken();
        }
        if (lexer.token() == Token.EQ || lexer.token() == Token.DOT) {
            lexer.nextToken();
        } else if (lexer.token() != Token.STAR) {
            lexer.reset();
        }
        if (lexer.token() == Token.PERCENT) {
            lexer.nextToken();
        }
    }
}
Example 4
Source Project: elasticsearch-sql File: ElasticSqlExprParser.java License: Apache License 2.0
@Override
public MySqlPrimaryKey parsePrimaryKey() {
    accept(Token.PRIMARY);
    accept(Token.KEY);

    MySqlPrimaryKey primaryKey = new MySqlPrimaryKey();

    if (lexer.identifierEquals(FnvHash.Constants.USING)) {
        lexer.nextToken();
        primaryKey.setIndexType(lexer.stringVal());
        lexer.nextToken();
    }

    // a name may appear before the LPAREN-delimited column list
    if (lexer.token() != Token.LPAREN) {
        SQLName name = this.name();
        primaryKey.setName(name);
    }

    // LPAREN opens the key column list: PRIMARY KEY (col1, col2, ...)
    accept(Token.LPAREN);
    for (;;) {
        SQLExpr expr;
        if (lexer.token() == Token.LITERAL_ALIAS) {
            expr = this.name();
        } else {
            expr = this.expr();
        }
        primaryKey.addColumn(expr);

        if (lexer.token() != Token.COMMA) {
            break;
        } else {
            lexer.nextToken();
        }
    }
    accept(Token.RPAREN);

    if (lexer.identifierEquals(FnvHash.Constants.USING)) {
        lexer.nextToken();
        primaryKey.setIndexType(lexer.stringVal());
        lexer.nextToken();
    }

    return primaryKey;
}
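The LPAREN-delimited column list handled by parsePrimaryKey above is reached whenever a CREATE TABLE statement contains a PRIMARY KEY clause. The following is a minimal sketch of driving such a parse through the stock Druid SQLUtils entry point rather than the elasticsearch-sql subclass shown here; the class name PrimaryKeyParseDemo is hypothetical.

    import java.util.List;

    import com.alibaba.druid.sql.SQLUtils;
    import com.alibaba.druid.sql.ast.SQLStatement;
    import com.alibaba.druid.util.JdbcConstants;

    public class PrimaryKeyParseDemo {
        public static void main(String[] args) {
            // The PRIMARY KEY (id, name) clause drives the accept(Token.LPAREN) /
            // column-list / accept(Token.RPAREN) sequence shown in Example 4.
            String sql = "CREATE TABLE t (id BIGINT, name VARCHAR(32), PRIMARY KEY (id, name))";
            List<SQLStatement> statements = SQLUtils.parseStatements(sql, JdbcConstants.MYSQL);
            System.out.println(SQLUtils.toSQLString(statements.get(0), JdbcConstants.MYSQL));
        }
    }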
Example 5
Source Project: elasticsearch-sql File: ElasticSqlExprParser.java License: Apache License 2.0
public SQLPartition parsePartition() {
    accept(Token.PARTITION);

    SQLPartition partitionDef = new SQLPartition();
    partitionDef.setName(this.name());

    SQLPartitionValue values = this.parsePartitionValues();
    if (values != null) {
        partitionDef.setValues(values);
    }

    for (;;) {
        boolean storage = false;
        if (lexer.identifierEquals(FnvHash.Constants.DATA)) {
            lexer.nextToken();
            acceptIdentifier("DIRECTORY");
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            partitionDef.setDataDirectory(this.expr());
        } else if (lexer.token() == Token.TABLESPACE) {
            lexer.nextToken();
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            SQLName tableSpace = this.name();
            partitionDef.setTablespace(tableSpace);
        } else if (lexer.token() == Token.INDEX) {
            lexer.nextToken();
            acceptIdentifier("DIRECTORY");
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            partitionDef.setIndexDirectory(this.expr());
        } else if (lexer.identifierEquals(FnvHash.Constants.MAX_ROWS)) {
            lexer.nextToken();
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            SQLExpr maxRows = this.primary();
            partitionDef.setMaxRows(maxRows);
        } else if (lexer.identifierEquals(FnvHash.Constants.MIN_ROWS)) {
            lexer.nextToken();
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            SQLExpr minRows = this.primary();
            partitionDef.setMinRows(minRows);
        } else if (lexer.identifierEquals(FnvHash.Constants.ENGINE)
                || (storage = (lexer.token() == Token.STORAGE || lexer.identifierEquals(FnvHash.Constants.STORAGE)))) {
            if (storage) {
                lexer.nextToken();
            }
            acceptIdentifier("ENGINE");
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            SQLName engine = this.name();
            partitionDef.setEngine(engine);
        } else if (lexer.token() == Token.COMMENT) {
            lexer.nextToken();
            if (lexer.token() == Token.EQ) {
                lexer.nextToken();
            }
            SQLExpr comment = this.primary();
            partitionDef.setComment(comment);
        } else {
            break;
        }
    }

    // LPAREN opens an optional subpartition list: PARTITION p0 ... (SUBPARTITION s0, SUBPARTITION s1)
    if (lexer.token() == Token.LPAREN) {
        lexer.nextToken();
        for (;;) {
            acceptIdentifier("SUBPARTITION");

            SQLName subPartitionName = this.name();
            SQLSubPartition subPartition = new SQLSubPartition();
            subPartition.setName(subPartitionName);
            partitionDef.addSubPartition(subPartition);

            if (lexer.token() == Token.COMMA) {
                lexer.nextToken();
                continue;
            }
            break;
        }
        accept(Token.RPAREN);
    }

    return partitionDef;
}