org.apache.hadoop.hive.ql.parse.ASTNode Java Examples

The following examples show how to use org.apache.hadoop.hive.ql.parse.ASTNode. Each example is taken from an open-source project; the source file and project are noted above each snippet.
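Before the project-specific examples, here is a minimal, self-contained sketch of how an ASTNode is typically obtained and traversed: parse a HiveQL string with ParseDriver, then walk the tree with getChildCount() and getChild(). This snippet is not taken from any of the projects below; the class name and query string are illustrative only.

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class AstNodeDemo {

  public static void main(String[] args) throws ParseException {
    // Parse a HiveQL statement into its abstract syntax tree.
    ParseDriver pd = new ParseDriver();
    ASTNode root = pd.parse("SELECT id, name FROM users WHERE id = 1");
    printTree(root, 0);
  }

  // Recursively print each node's token text, indented by depth.
  private static void printTree(ASTNode node, int depth) {
    StringBuilder indent = new StringBuilder();
    for (int i = 0; i < depth; i++) {
      indent.append("  ");
    }
    System.out.println(indent + node.getText());
    for (int i = 0; i < node.getChildCount(); i++) {
      printTree((ASTNode) node.getChild(i), depth + 1);
    }
  }
}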
Example #1
Source File: Parser.java    From Eagle with Apache License 2.0
private void visitSubtree(ASTNode ast) {
  int len = ast.getChildCount();
  if (len > 0) {
    for (Node n : ast.getChildren()) {
      ASTNode asn = (ASTNode)n;
      switch (asn.getToken().getType()) {
      case HiveParser.TOK_TABNAME:
        tableSet.add(ast.getChild(0).getChild(0).getText());
        break;
      case HiveParser.TOK_SET_COLUMNS_CLAUSE:
        for (int i = 0; i < asn.getChildCount(); i++) {
          addToColumnSet((ASTNode)asn.getChild(i).getChild(0));
        }
        break;
      case HiveParser.TOK_FROM:
        parseFromClause((ASTNode)asn.getChild(0));
        break;
      case HiveParser.TOK_INSERT:
        for (int i = 0; i < asn.getChildCount(); i++) {
          parseInsertClause((ASTNode)asn.getChild(i));
        }
        break;
      case HiveParser.TOK_UNIONTYPE:
        int childcount = asn.getChildCount();
        for (int i = 0; i < childcount; i++) {
          parseQL((ASTNode)asn.getChild(i));
        }
        break;
      }
    }

    // Add tableSet and columnSet to tableColumnMap
    addTablesColumnsToMap(tableSet, columnSet);
  }
}
 
Example #2
Source File: Parser.java    From Eagle with Apache License 2.0
private void parseQL(ASTNode ast) {
  switch (ast.getType()) {
  case HiveParser.TOK_QUERY:
    visitSubtree(ast);
    break;

  case HiveParser.TOK_UPDATE_TABLE:
    setOperation("UPDATE");
    visitSubtree(ast);
    break;

  case HiveParser.TOK_DELETE_FROM:
    setOperation("DELETE FROM");
    visitSubtree(ast);
    break;

  default:
    LOG.error("Unsupporting query operation " + ast.getType());
    throw new IllegalStateException("Query operation is not supported "
        + ast.getType());
  }
}
 
Example #3
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  ASTNode child = (ASTNode) ast.getChild(0);
  PrincipalType principalType = PrincipalType.USER;
  switch (child.getType()) {
  case HiveParser.TOK_USER:
    principalType = PrincipalType.USER;
    break;
  case HiveParser.TOK_GROUP:
    principalType = PrincipalType.GROUP;
    break;
  case HiveParser.TOK_ROLE:
    principalType = PrincipalType.ROLE;
    break;
  }
  if (principalType != PrincipalType.GROUP) {
    String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principalType;
    throw new SemanticException(msg);
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
  RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType,
      RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
  roleDesc.setResFile(resultFile.toString());
  return createTask(new DDLWork(inputs, outputs,  roleDesc));
}
 
Example #4
Source File: Parser.java    From eagle with Apache License 2.0
private void parseQueryClause(ASTNode ast) {
  int len = ast.getChildCount();
  if (len > 0) {
    for (Node n : ast.getChildren()) {
      ASTNode asn = (ASTNode) n;
      switch (asn.getToken().getType()) {
        case HiveParser.TOK_FROM:
          parseFromClause((ASTNode) asn.getChild(0));
          break;
        case HiveParser.TOK_INSERT:
          for (int i = 0; i < asn.getChildCount(); i++) {
            parseInsertClause((ASTNode) asn.getChild(i));
          }
          break;
      }
    }
  }
}
 
Example #5
Source File: Parser.java    From eagle with Apache License 2.0
private void parseTokFunction(ASTNode ast, Set<String> set) {
  switch(ast.getType()) {
    case HiveParser.TOK_TABLE_OR_COL:
      String colRealName = convAliasToReal(columnAliasMap, ast.getChild(0).getText());
      set.add(colRealName);
      break;
    case HiveParser.TOK_FUNCTION:
      for (int i = 0; i < ast.getChildCount(); i++) {
        ASTNode n = (ASTNode)ast.getChild(i);
        if (n != null) {
          parseTokFunction(n, set);
        }
      }
      break;
  }
}
 
Example #6
Source File: Parser.java    From Eagle with Apache License 2.0
private void parseSubQuery(ASTNode subQuery) {
  switch (subQuery.getToken().getType()) {
  case HiveParser.TOK_QUERY:
    visitSubtree(subQuery);
    break;
  case HiveParser.TOK_UNIONTYPE:
    int childCount = subQuery.getChildCount();
    for (int i = 0; i < childCount; i++) {
      parseSubQuery((ASTNode) subQuery.getChild(i));
    }
    break;
  }
}
 
Example #7
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createRevokeTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef((ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef((ASTNode) ast.getChild(1));
  PrivilegeObjectDesc privilegeObj = null;
  if (ast.getChildCount() > 2) {
    ASTNode astChild = (ASTNode) ast.getChild(2);
    privilegeObj = analyzePrivilegeObject(astChild);
  }
  if (privilegeObj != null && privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.ROLE) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + princ.getType();
      throw new SemanticException(msg);
    }
  }
  RevokeDesc revokeDesc = new RevokeDesc(privilegeDesc, principalDesc, privilegeObj);
  return createTask(new DDLWork(inputs, outputs, revokeDesc));
}
 
Example #8
Source File: HiveASTRewriter.java    From incubator-atlas with Apache License 2.0
public String rewrite(String sourceQry) throws RewriteException {
    String result = sourceQry;
    ASTNode tree = null;
    try {
        ParseDriver pd = new ParseDriver();
        tree = pd.parse(sourceQry, queryContext, true);
        tree = ParseUtils.findRootNonNullToken(tree);
        this.rwCtx = new RewriteContext(sourceQry, tree, queryContext.getTokenRewriteStream());
        rewrite(tree);
        result = toSQL();
    } catch (ParseException e) {
       LOG.error("Could not parse the query {} ", sourceQry, e);
        throw new RewriteException("Could not parse query : " , e);
    }
    return result;
}
 
Example #9
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
      (ASTNode) ast.getChild(0));

  List<String> roles = new ArrayList<String>();
  for (int i = 1; i < ast.getChildCount(); i++) {
    roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
  }
  String roleOwnerName = "";
  if (SessionState.get() != null
      && SessionState.get().getAuthenticator() != null) {
    roleOwnerName = SessionState.get().getAuthenticator().getUserName();
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.GROUP) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_ON_OBJECT + princ.getType();
      throw new SemanticException(msg);
    }
  }
  GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant,
      roles, principalDesc, roleOwnerName, PrincipalType.USER, false);
  return createTask(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
}
 
Example #10
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createShowRolePrincipalsTask(ASTNode ast, Path resFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  String roleName;

  if (ast.getChildCount() == 1) {
    roleName = ast.getChild(0).getText();
  } else {
    // the parser should not allow this
    throw new AssertionError("Unexpected Tokens in SHOW ROLE PRINCIPALS");
  }

  RoleDDLDesc roleDDLDesc = new RoleDDLDesc(roleName, PrincipalType.ROLE,
      RoleDDLDesc.RoleOperation.SHOW_ROLE_PRINCIPALS, null);
  roleDDLDesc.setResFile(resFile.toString());
  return createTask(new DDLWork(inputs, outputs, roleDDLDesc));
  //return TaskFactory.get(new DDLWork(inputs, outputs, roleDDLDesc), conf);
}
 
Example #11
Source File: HiveAuthzBindingHook.java    From incubator-sentry with Apache License 2.0
@VisibleForTesting
protected static AccessURI extractPartition(ASTNode ast) throws SemanticException {
  for (int i = 0; i < ast.getChildCount(); i++) {
    ASTNode child = (ASTNode)ast.getChild(i);
    if (child.getToken().getType() == HiveParser.TOK_PARTITIONLOCATION &&
        child.getChildCount() == 1) {
      return parseURI(BaseSemanticAnalyzer.
        unescapeSQLString(child.getChild(0).getText()));
    }
  }
  return null;
}
 
Example #12
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  SentryHivePrivilegeObjectDesc privHiveObj = null;

  ASTNode principal = (ASTNode) ast.getChild(0);
  PrincipalType type = PrincipalType.USER;
  switch (principal.getType()) {
  case HiveParser.TOK_USER:
    type = PrincipalType.USER;
    break;
  case HiveParser.TOK_GROUP:
    type = PrincipalType.GROUP;
    break;
  case HiveParser.TOK_ROLE:
    type = PrincipalType.ROLE;
    break;
  }
  if (type != PrincipalType.ROLE) {
    String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + type;
    throw new SemanticException(msg);
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(principal.getChild(0).getText());
  PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);

  // Partition privileges are not supported by Sentry
  if (ast.getChildCount() > 1) {
    ASTNode child = (ASTNode) ast.getChild(1);
    if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = analyzePrivilegeObject(child);
    } else {
      throw new SemanticException("Unrecognized Token: " + child.getToken().getType());
    }
  }

  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
      principalDesc, privHiveObj);
  return createTask(new DDLWork(inputs, outputs, showGrant));
}
 
Example #13
Source File: HiveAuthzBindingHook.java    From incubator-sentry with Apache License 2.0
private Table extractTable(ASTNode ast) throws SemanticException {
  String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
  if (tableName.contains(".")) {
    return new Table(tableName.split("\\.")[1]);
  } else {
    return new Table(tableName);
  }
}
 
Example #14
Source File: Parser.java    From eagle with Apache License 2.0
private void addToColumnSet(ASTNode node) {
  ASTNode child_0 = (ASTNode)node.getChild(0).getChild(0);
  ASTNode child_1 = (ASTNode)node.getChild(1);
  if (child_1 != null) {
    columnAliasMap.put(child_1.getText(), child_0.getText());
  }
  String col = convAliasToReal(columnAliasMap, child_0.getText());
  columnSet.add(col);
}
 
Example #15
Source File: TestParser.java    From eagle with Apache License 2.0
private void printTree(ASTNode root, int indent) {
  if (root != null) {
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < indent; i++) {
      sb.append("   ");
    }
    for (int i = 0; i < root.getChildCount(); i++) {
      System.out.println(sb.toString() + root.getChild(i).getText());
      printTree((ASTNode) root.getChild(i), indent + 1);
    }
  }
}
 
Example #16
Source File: AbstractHive3QLProcessor.java    From nifi with Apache License 2.0
protected Set<TableName> findTableNames(final String query) {
    final ASTNode node;
    try {
        node = new ParseDriver().parse(normalize(query));
    } catch (ParseException e) {
        // If failed to parse the query, just log a message, but continue.
        getLogger().debug("Failed to parse query: {} due to {}", new Object[]{query, e}, e);
        return Collections.emptySet();
    }
    final HashSet<TableName> tableNames = new HashSet<>();
    findTableNames(node, tableNames);
    return tableNames;
}
 
Example #17
Source File: AbstractHiveQLProcessor.java    From nifi with Apache License 2.0
protected Set<TableName> findTableNames(final String query) {
    final ASTNode node;
    try {
        node = new ParseDriver().parse(normalize(query));
    } catch (ParseException e) {
        // If failed to parse the query, just log a message, but continue.
        getLogger().debug("Failed to parse query: {} due to {}", new Object[]{query, e}, e);
        return Collections.emptySet();
    }

    final HashSet<TableName> tableNames = new HashSet<>();
    findTableNames(node, tableNames);
    return tableNames;
}
 
Example #18
Source File: AbstractHive_1_1QLProcessor.java    From nifi with Apache License 2.0
protected Set<TableName> findTableNames(final String query) {
    final ASTNode node;
    try {
        node = new ParseDriver().parse(normalize(query));
    } catch (ParseException e) {
        // If failed to parse the query, just log a message, but continue.
        getLogger().debug("Failed to parse query: {} due to {}", new Object[]{query, e}, e);
        return Collections.emptySet();
    }

    final HashSet<TableName> tableNames = new HashSet<>();
    findTableNames(node, tableNames);
    return tableNames;
}
 
Example #19
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createCreateRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
    String msg = "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
    throw new SemanticException(msg);
  }
  RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, RoleDDLDesc.RoleOperation.CREATE_ROLE);
  return createTask(new DDLWork(inputs, outputs, roleDesc));
}
 
Example #20
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createDropRoleTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  String roleName = BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(0).getText());
  if (AccessConstants.RESERVED_ROLE_NAMES.contains(roleName.toUpperCase())) {
    String msg = "Roles cannot be one of the reserved roles: " + AccessConstants.RESERVED_ROLE_NAMES;
    throw new SemanticException(msg);
  }
  RoleDDLDesc roleDesc = new RoleDDLDesc(roleName, RoleDDLDesc.RoleOperation.DROP_ROLE);
  return createTask(new DDLWork(inputs, outputs, roleDesc));
}
 
Example #21
Source File: HiveAuthzBindingHook.java    From incubator-sentry with Apache License 2.0
private Database extractDatabase(ASTNode ast) throws SemanticException {
  String tableName = BaseSemanticAnalyzer.getUnescapedName(ast);
  if (tableName.contains(".")) {
    return new Database(tableName.split("\\.")[0]);
  } else {
    return getCanonicalDb();
  }
}
 
Example #22
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
private List<PrincipalDesc> analyzePrincipalListDef(ASTNode node) {
  List<PrincipalDesc> principalList = new ArrayList<PrincipalDesc>();
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    PrincipalType type = null;
    switch (child.getType()) {
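    // Note: the numeric case labels below (880, 685, 782) appear to be hard-coded
    // HiveParser token ids, presumably kept alongside the named TOK_* constants for
    // compatibility with a particular Hive version.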
    case 880:
      type = PrincipalType.USER;
      break;
    case HiveParser.TOK_USER:
      type = PrincipalType.USER;
      break;
    case 685:
      type = PrincipalType.GROUP;
      break;
    case HiveParser.TOK_GROUP:
      type = PrincipalType.GROUP;
      break;
    case 782:
      type = PrincipalType.ROLE;
      break;
    case HiveParser.TOK_ROLE:
      type = PrincipalType.ROLE;
      break;
    }
    String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
    PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
    LOG.debug("## Principal : [ " + principalName + ", " + type + "]");
    principalList.add(principalDesc);
  }
  return principalList;
}
 
Example #23
Source File: TableProcessor.java    From circus-train with Apache License 2.0
private String extractTableName(ASTNode tabNameNode) {
  try {
    return BaseSemanticAnalyzer.getDotName(BaseSemanticAnalyzer.getQualifiedTableName(tabNameNode));
  } catch (SemanticException e) {
    throw new RuntimeException("Unable to extract qualified table name from node: " + tabNameNode.dump(), e);
  }
}
 
Example #24
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef(
      (ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
      (ASTNode) ast.getChild(1));
  SentryHivePrivilegeObjectDesc privilegeObj = null;
  boolean grantOption = false;
  if (ast.getChildCount() > 2) {
    for (int i = 2; i < ast.getChildCount(); i++) {
      ASTNode astChild = (ASTNode) ast.getChild(i);
      if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
        grantOption = true;
      } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
        privilegeObj = analyzePrivilegeObject(astChild);
      }
    }
  }
  String userName = null;
  if (SessionState.get() != null
      && SessionState.get().getAuthenticator() != null) {
    userName = SessionState.get().getAuthenticator().getUserName();
  }
  Preconditions.checkNotNull(privilegeObj, "privilegeObj is null for " + ast.dump());
  if (privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.ROLE) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + princ.getType();
      throw new SemanticException(msg);
    }
  }
  GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
      principalDesc, userName, PrincipalType.USER, grantOption);
  return createTask(new DDLWork(inputs, outputs, grantDesc));
}
 
Example #25
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createShowRolesTask(ASTNode ast, Path resFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  RoleDDLDesc showRolesDesc = new RoleDDLDesc(null, null, RoleDDLDesc.RoleOperation.SHOW_ROLES,
      null);
  showRolesDesc.setResFile(resFile.toString());
  return createTask(new DDLWork(inputs, outputs, showRolesDesc));
}
 
Example #26
Source File: Parser.java    From eagle with Apache License 2.0
private void visitSubtree(ASTNode ast) {
  int len = ast.getChildCount();
  if (len > 0) {
    for (Node n : ast.getChildren()) {
      ASTNode asn = (ASTNode)n;
      switch (asn.getToken().getType()) {
        case HiveParser.TOK_TABNAME:
          //tableSet.add(ast.getChild(0).getChild(0).getText());
          parserContent.getTableColumnMap().put(ast.getChild(0).getChild(0).getText(), new HashSet<>(columnSet));
          break;
        case HiveParser.TOK_SET_COLUMNS_CLAUSE:
          for (int i = 0; i < asn.getChildCount(); i++) {
            addToColumnSet((ASTNode) asn.getChild(i).getChild(0));
          }
          break;
        case HiveParser.TOK_QUERY:
          parseQueryClause(asn);
          break;
        case HiveParser.TOK_UNIONTYPE:
        case HiveParser.TOK_UNIONALL:
        case HiveParser.TOK_UNIONDISTINCT:
          visitSubtree(asn);
          break;
      }
    }
    // Add tableSet and columnSet to tableColumnMap
    addTablesColumnsToMap(tableSet, columnSet);
  }
}
 
Example #27
Source File: LiteralRewriter.java    From incubator-atlas with Apache License 2.0
@Override
public void rewrite(RewriteContext ctx, final ASTNode node) throws RewriteException {
    try {
        processLiterals(ctx, node);
    } catch(Exception e) {
        throw new RewriteException("Could not normalize query", e);
    }
}
 
Example #28
Source File: HiveASTRewriter.java    From incubator-atlas with Apache License 2.0
private void rewrite(ASTNode origin) throws RewriteException {
    ASTNode node = origin;
    if (node != null) {
        for(ASTRewriter rewriter : rewriters) {
            rewriter.rewrite(rwCtx, node);
        }
        if (node.getChildren() != null) {
            for (int i = 0; i < node.getChildren().size(); i++) {
                rewrite((ASTNode) node.getChild(i));
            }
        }
    }
}
 
Example #29
Source File: Parser.java    From Eagle with Apache License 2.0
/**
 * Parses the given HiveQL query and populates the parser content.
 *
 * @param query the HiveQL query string to parse
 * @return the populated {@link HiveQLParserContent}
 * @throws Exception if the query cannot be parsed
 */
public HiveQLParserContent run(String query) throws Exception {
  ASTNode tree = generateAST(query);
  parseQL((ASTNode)tree.getChild(0));

  LOG.info("HiveQL parse completed.");

  return parserContent;
}
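The generateAST helper called above is not shown in this listing. Assuming it simply wraps Hive's ParseDriver (a hedged sketch, not necessarily Eagle's actual implementation), it could look like this:

private ASTNode generateAST(String query) throws ParseException {
  // Hypothetical helper: delegate parsing of the HiveQL string to Hive's ParseDriver.
  ParseDriver pd = new ParseDriver();
  return pd.parse(query);
}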
 
Example #30
Source File: TableProcessor.java    From circus-train with Apache License 2.0
@Override
public Object process(Node node, Stack<Node> stack, NodeProcessorCtx procCtx, Object... nodeOutputs)
  throws SemanticException {
  ASTNode astNode = (ASTNode) node;
  if (astNode.getToken() != null && astNode.getToken().getText() != null) {
    if ("TOK_TABNAME".equals(astNode.getToken().getText())) {
      tables.add(extractTableName(astNode));
    }
  }
  return null;
}