Java Code Examples for org.apache.hadoop.hive.ql.hooks.Entity#getType()

The following examples show how to use org.apache.hadoop.hive.ql.hooks.Entity#getType(). You can vote up the examples you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: DropDatabase.java    From atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the list of Atlas object IDs affected by a DROP DATABASE operation.
 * For each output entity, emits an id for the dropped database or table and
 * evicts it from the hook context's known-entity cache.
 */
private List<AtlasObjectId> getHiveEntities() {
    final List<AtlasObjectId> entityIds = new ArrayList<>();

    for (Entity output : getOutputs()) {
        switch (output.getType()) {
            case DATABASE: {
                String dbQualifiedName = getQualifiedName(output.getDatabase());

                // drop invalidates the cached entry for this database
                context.removeFromKnownDatabase(dbQualifiedName);

                entityIds.add(new AtlasObjectId(HIVE_TYPE_DB, ATTRIBUTE_QUALIFIED_NAME, dbQualifiedName));
                break;
            }

            case TABLE: {
                String tblQualifiedName = getQualifiedName(output.getTable());

                // tables inside the dropped database are reported individually
                context.removeFromKnownTable(tblQualifiedName);

                entityIds.add(new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, tblQualifiedName));
                break;
            }

            default:
                // other entity types are not relevant to DROP DATABASE
                break;
        }
    }

    return entityIds;
}
 
Example 2
Source File: DropTable.java    From atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the list of Atlas object IDs for the tables dropped by this
 * operation, evicting each from the hook context's known-table cache.
 */
public List<AtlasObjectId> getHiveEntities() {
    List<AtlasObjectId> tableIds = new ArrayList<>();

    for (Entity output : getOutputs()) {
        // only TABLE outputs are relevant to DROP TABLE
        if (output.getType() != Entity.Type.TABLE) {
            continue;
        }

        String qualifiedName = getQualifiedName(output.getTable());

        // the cached entry is stale once the table is dropped
        context.removeFromKnownTable(qualifiedName);

        tableIds.add(new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, qualifiedName));
    }

    return tableIds;
}
 
Example 3
Source File: HiveHook.java    From incubator-atlas with Apache License 2.0 6 votes vote down vote up
/**
 * Appends each distinct data-set input of the Hive operation to {@code buffer},
 * building up the process qualified-name. DFS/local directory inputs are
 * skipped when {@code ignoreHDFSPathsInQFName} is set; table and partition
 * inputs additionally carry the table creation time.
 *
 * @param hiveBridge              bridge used to look up table metadata
 * @param op                      the Hive operation being processed (not read here; kept for signature parity)
 * @param sortedInputs            inputs in deterministic order; may be null (no-op)
 * @param buffer                  target the qualified-name fragments are appended to
 * @param refs                    entity-to-Referenceable map; may be null (no-op)
 * @param ignoreHDFSPathsInQFName whether DFS/local dir inputs are excluded from the name
 * @throws HiveException if the metastore table lookup fails
 */
private static void addInputs(HiveMetaStoreBridge hiveBridge, HiveOperation op, SortedSet<ReadEntity> sortedInputs, StringBuilder buffer, final Map<ReadEntity, Referenceable> refs, final boolean ignoreHDFSPathsInQFName) throws HiveException {
    if (refs == null || sortedInputs == null) {
        return; // nothing to contribute
    }

    Set<String> dataSetsProcessed = new LinkedHashSet<>();

    for (Entity input : sortedInputs) {
        // hoisted: the original recomputed input.getName().toLowerCase() on every use
        String inputName = input.getName().toLowerCase();

        if (dataSetsProcessed.contains(inputName)) {
            continue; // this data set already contributed to the qualified name
        }

        //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations
        if (ignoreHDFSPathsInQFName &&
            (Type.DFS_DIR.equals(input.getType()) || Type.LOCAL_DIR.equals(input.getType()))) {
            LOG.debug("Skipping dfs dir input addition to process qualified name {} ", input.getName());
        } else if (refs.containsKey(input)) {
            if (input.getType() == Type.PARTITION || input.getType() == Type.TABLE) {
                // NOTE(review): assumes getTableCreatedTime never returns null for an existing table — TODO confirm
                final Date createTime = HiveMetaStoreBridge.getTableCreatedTime(hiveBridge.hiveClient.getTable(input.getTable().getDbName(), input.getTable().getTableName()));
                addDataset(buffer, refs.get(input), createTime.getTime());
            } else {
                addDataset(buffer, refs.get(input));
            }
        }

        dataSetsProcessed.add(inputName);
    }
}
 
Example 4
Source File: CreateTable.java    From atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Atlas entities for a CREATE TABLE operation: scans the outputs
 * for the first TABLE entity that resolves in the metastore and is not a
 * skippable temporary table, then converts it (or null) via processTable.
 *
 * @return the collected Atlas entities with extended info
 * @throws Exception if the metastore lookup fails
 */
public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception {
    AtlasEntitiesWithExtInfo result       = new AtlasEntitiesWithExtInfo();
    Table                    createdTable = null;

    if (CollectionUtils.isNotEmpty(getOutputs())) {
        for (Entity output : getOutputs()) {
            if (output.getType() != Entity.Type.TABLE) {
                continue;
            }

            Table candidate = output.getTable();

            if (candidate == null) {
                continue;
            }

            // re-read from the metastore to get the fully-populated definition
            candidate = getHive().getTable(candidate.getDbName(), candidate.getTableName());

            if (candidate != null && !skipTemporaryTable(candidate)) {
                createdTable = candidate;
                break;
            }
            // temporary (skippable) or unresolvable tables are passed over
        }
    }

    processTable(createdTable, result);

    addProcessedEntities(result);

    return result;
}
 
Example 5
Source File: CreateDatabase.java    From atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Atlas entities for a CREATE DATABASE operation: for each DATABASE
 * output, re-reads the database from the metastore, converts it to an Atlas
 * entity, and attaches the corresponding DDL entity when one is produced.
 *
 * @return the collected Atlas entities with extended info
 * @throws Exception if the metastore lookup fails
 */
public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception {
    AtlasEntitiesWithExtInfo ret = new AtlasEntitiesWithExtInfo();

    for (Entity entity : getOutputs()) {
        if (entity.getType() == DATABASE) {
            Database db = entity.getDatabase();

            if (db != null) {
                // refresh from the metastore so we capture the fully-populated definition
                db = getHive().getDatabase(getDatabaseName(db));
            }

            if (db != null) {
                AtlasEntity dbEntity    = toDbEntity(db);
                AtlasEntity dbDDLEntity = createHiveDDLEntity(dbEntity);

                ret.addEntity(dbEntity);

                if (dbDDLEntity != null) {
                    ret.addEntity(dbDDLEntity);
                }
            } else {
                // fixed: message previously referenced a non-existent method name "getEntities()"
                LOG.error("CreateDatabase.getHiveEntities(): failed to retrieve db");
            }
        }
    }

    addProcessedEntities(ret);

    return ret;
}
 
Example 6
Source File: HiveITBase.java    From atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the first entity in {@code entities} whose type matches
 * {@code entityType}, or {@code null} when none matches.
 */
protected static Entity getEntityByType(Set<? extends Entity> entities, Entity.Type entityType) {
    for (Entity candidate : entities) {
        if (entityType == candidate.getType()) {
            return candidate;
        }
    }

    return null;
}
 
Example 7
Source File: HiveITBase.java    From atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Appends each distinct data-set input of the Hive operation to {@code buffer},
 * building up the process qualified-name. DFS/local directory inputs are
 * skipped when {@code ignoreHDFSPathsInQFName} is set; table and partition
 * inputs carry the table creation time when the table resolves.
 *
 * @param hiveBridge              bridge used to look up table metadata
 * @param op                      the Hive operation being processed (not read here; kept for signature parity)
 * @param sortedInputs            inputs in deterministic order; may be null (no-op)
 * @param buffer                  target the qualified-name fragments are appended to
 * @param refs                    entity-to-AtlasEntity map; may be null (no-op)
 * @param ignoreHDFSPathsInQFName whether DFS/local dir inputs are excluded from the name
 * @throws HiveException if the metastore table lookup fails
 */
protected static void addInputs(HiveMetaStoreBridge hiveBridge, HiveOperation op, SortedSet<ReadEntity> sortedInputs, StringBuilder buffer, final Map<ReadEntity, AtlasEntity> refs, final boolean ignoreHDFSPathsInQFName) throws HiveException {
    if (refs == null || sortedInputs == null) {
        return; // nothing to contribute
    }

    Set<String> dataSetsProcessed = new LinkedHashSet<>();

    for (Entity input : sortedInputs) {
        // hoisted: the original recomputed input.getName().toLowerCase() on every use
        String inputName = input.getName().toLowerCase();

        if (dataSetsProcessed.contains(inputName)) {
            continue; // this data set already contributed to the qualified name
        }

        //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations
        if (ignoreHDFSPathsInQFName &&
                (Entity.Type.DFS_DIR.equals(input.getType()) || Entity.Type.LOCAL_DIR.equals(input.getType()))) {
            LOG.debug("Skipping dfs dir input addition to process qualified name {} ", input.getName());
        } else if (refs.containsKey(input)) {
            if (input.getType() == Entity.Type.PARTITION || input.getType() == Entity.Type.TABLE) {
                Table inputTable = refreshTable(hiveBridge, input.getTable().getDbName(), input.getTable().getTableName());

                // unresolvable tables are silently omitted from the name
                if (inputTable != null) {
                    addDataset(buffer, refs.get(input), HiveMetaStoreBridge.getTableCreatedTime(inputTable));
                }
            } else {
                addDataset(buffer, refs.get(input));
            }
        }

        dataSetsProcessed.add(inputName);
    }
}
 
Example 8
Source File: HiveHook.java    From incubator-atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Creates or updates Atlas entities for the first output of the event that
 * matches {@code entityType}; returns {@code null} when no output matches.
 *
 * @throws AtlasHookException if entity creation/update fails
 */
private LinkedHashMap<Type, Referenceable> handleEventOutputs(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Type entityType) throws AtlasHookException {
    try {
        for (Entity output : event.getOutputs()) {
            if (entityType == output.getType()) {
                // only the first matching output is registered
                return createOrUpdateEntities(dgiBridge, event, output, true);
            }
        }

        return null;
    } catch (Exception e) {
        // wrap any failure so callers see a single hook-level exception type
        throw new AtlasHookException("HiveHook.handleEventOutputs() failed.", e);
    }
}
 
Example 9
Source File: HiveHook.java    From incubator-atlas with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the first entity in {@code entities} whose type matches
 * {@code entityType}, or {@code null} when none matches.
 */
private static Entity getEntityByType(Set<? extends Entity> entities, Type entityType) {
    for (Entity candidate : entities) {
        if (entityType == candidate.getType()) {
            return candidate;
        }
    }

    return null;
}
 
Example 10
Source File: HiveAuthzBindingHook.java    From incubator-sentry with Apache License 2.0 5 votes vote down vote up
/**
 * Maps a Hive query {@link Entity} onto the Sentry authorizable hierarchy
 * used for privilege checks.
 *
 * @param entity the Hive read/write entity from the compiled query
 * @return the ordered authorizable chain (e.g. Database then Table), or an
 *         empty list for DATABASE/FUNCTION entities which are handled elsewhere
 * @throws AuthorizationException if a directory entity's URI cannot be parsed
 * @throws UnsupportedOperationException for entity types with no mapping
 */
private List<DBModelAuthorizable> getAuthzHierarchyFromEntity(Entity entity) {
  List<DBModelAuthorizable> objectHierarchy = new ArrayList<DBModelAuthorizable>();
  switch (entity.getType()) {
  case TABLE:
    // table access authorizes against Database -> Table
    objectHierarchy.add(new Database(entity.getTable().getDbName()));
    objectHierarchy.add(new Table(entity.getTable().getTableName()));
    break;
  case PARTITION:
  case DUMMYPARTITION:
    // partitions authorize against their owning table's hierarchy
    objectHierarchy.add(new Database(entity.getPartition().getTable().getDbName()));
    objectHierarchy.add(new Table(entity.getPartition().getTable().getTableName()));
    break;
  case DFS_DIR:
  case LOCAL_DIR:
    try {
      // idiom fix: enum constants compared with ==, matching the switch above
      objectHierarchy.add(parseURI(entity.toString(),
          entity.getType() == Entity.Type.LOCAL_DIR));
    } catch (Exception e) {
      throw new AuthorizationException("Failed to get File URI", e);
    }
    break;
  case DATABASE:
  case FUNCTION:
    // TODO use database entities from compiler instead of capturing from AST
    break;
  default:
    throw new UnsupportedOperationException("Unsupported entity type " +
        entity.getType().name());
  }
  return objectHierarchy;
}
 
Example 11
Source File: HiveHook.java    From incubator-atlas with Apache License 2.0 4 votes vote down vote up
/**
 * Creates or updates the Atlas Referenceables for a Hive entity (database,
 * table, or partition) and publishes them via an EntityUpdateRequest message.
 *
 * @param dgiBridge      bridge to the Hive metastore and Atlas type mappers
 * @param event          the Hive event providing the acting user for the message
 * @param entity         the Hive entity whose database/table is to be registered
 * @param skipTempTables when true, temporary non-external tables are not registered
 * @param existTable     when non-null, used instead of re-reading the table from the metastore
 * @return map of entity Type to the created Referenceable (DATABASE and, when
 *         applicable, TABLE); empty if the database could not be resolved
 * @throws AtlasHookException wrapping any underlying failure
 */
private LinkedHashMap<Type, Referenceable> createOrUpdateEntities(HiveMetaStoreBridge dgiBridge, HiveEventContext event, Entity entity, boolean skipTempTables, Table existTable) throws AtlasHookException {
    try {
        Database db = null;
        Table table = null;
        Partition partition = null;
        LinkedHashMap<Type, Referenceable> result = new LinkedHashMap<>();
        List<Referenceable> entities = new ArrayList<>();

        // Resolve which database/table/partition the entity refers to.
        switch (entity.getType()) {
            case DATABASE:
                db = entity.getDatabase();
                break;

            case TABLE:
                table = entity.getTable();
                db = dgiBridge.hiveClient.getDatabase(table.getDbName());
                break;

            case PARTITION:
                // a partition is registered through its owning table and database
                partition = entity.getPartition();
                table = partition.getTable();
                db = dgiBridge.hiveClient.getDatabase(table.getDbName());
                break;

            default:
                LOG.info("{}: entity-type not handled by Atlas hook. Ignored", entity.getType());
        }

        // Re-read the database from the metastore for the authoritative definition.
        if (db != null) {
            db = dgiBridge.hiveClient.getDatabase(db.getName());
        }

        if (db != null) {
            Referenceable dbEntity = dgiBridge.createDBInstance(db);

            entities.add(dbEntity);
            result.put(Type.DATABASE, dbEntity);

            Referenceable tableEntity = null;

            if (table != null) {
                // Prefer the caller-supplied table snapshot over a fresh metastore read.
                if (existTable != null) {
                    table = existTable;
                } else {
                    table = dgiBridge.hiveClient.getTable(table.getDbName(), table.getTableName());
                }
                //If its an external table, even though the temp table skip flag is on,
                // we create the table since we need the HDFS path to temp table lineage.
                if (skipTempTables &&
                        table.isTemporary() &&
                        !TableType.EXTERNAL_TABLE.equals(table.getTableType())) {
                    LOG.debug("Skipping temporary table registration {} since it is not an external table {} ", table.getTableName(), table.getTableType().name());

                } else {
                    tableEntity = dgiBridge.createTableInstance(dbEntity, table);
                    entities.add(tableEntity);
                    result.put(Type.TABLE, tableEntity);
                }
            }

            // Publish both referenceables in a single update notification.
            event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
        }
        return result;
    }
    catch(Exception e) {
        throw new AtlasHookException("HiveHook.createOrUpdateEntities() failed.", e);
    }
}