Java Code Examples for org.apache.hadoop.hive.ql.hooks.Entity

The following examples show how to use org.apache.hadoop.hive.ql.hooks.Entity. These examples are extracted from open-source projects. You can vote up the examples you find useful or vote down those you don't, and follow the links above each example to view the original project or source file. Related API usage is listed on the sidebar.
Example 1
Source Project: atlas   Source File: DropDatabase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds the list of Atlas object-ids for every database and table removed
 * by this DROP DATABASE operation, evicting each from the known-entity cache.
 */
private List<AtlasObjectId> getHiveEntities() {
    final List<AtlasObjectId> droppedIds = new ArrayList<>();

    for (Entity output : getOutputs()) {
        switch (output.getType()) {
            case DATABASE: {
                String qualifiedName = getQualifiedName(output.getDatabase());

                // The database no longer exists; drop it from the cache.
                context.removeFromKnownDatabase(qualifiedName);

                droppedIds.add(new AtlasObjectId(HIVE_TYPE_DB, ATTRIBUTE_QUALIFIED_NAME, qualifiedName));
                break;
            }
            case TABLE: {
                String qualifiedName = getQualifiedName(output.getTable());

                // Tables dropped along with the database are evicted as well.
                context.removeFromKnownTable(qualifiedName);

                droppedIds.add(new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, qualifiedName));
                break;
            }
            default:
                // Other entity types are not relevant to a database drop.
                break;
        }
    }

    return droppedIds;
}
 
Example 2
Source Project: atlas   Source File: DropTable.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns Atlas object-ids for each table removed by this DROP TABLE
 * operation, evicting each one from the known-table cache.
 */
public List<AtlasObjectId> getHiveEntities() {
    final List<AtlasObjectId> droppedTableIds = new ArrayList<>();

    for (Entity output : getOutputs()) {
        if (output.getType() != Entity.Type.TABLE) {
            continue; // only table outputs matter for a table drop
        }

        String qualifiedName = getQualifiedName(output.getTable());

        // The table no longer exists; drop it from the cache.
        context.removeFromKnownTable(qualifiedName);

        droppedTableIds.add(new AtlasObjectId(HIVE_TYPE_TABLE, ATTRIBUTE_QUALIFIED_NAME, qualifiedName));
    }

    return droppedTableIds;
}
 
Example 3
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testCTAS() throws Exception {
    // CREATE TABLE ... AS SELECT should produce one process entity linking
    // the source table (input) to the new table (output).
    String sourceTable = createTable();
    String targetTable = "table" + random();
    String query       = "create table " + targetTable + " as select * from " + sourceTable;

    runCommand(query);

    Set<ReadEntity>  inputs  = getInputs(sourceTable, Entity.Type.TABLE);
    Set<WriteEntity> outputs = getOutputs(targetTable, Entity.Type.TABLE);

    HiveEventContext event = constructEvent(query, HiveOperation.CREATETABLE_AS_SELECT, inputs, outputs);

    AtlasEntity   processEntity   = validateProcess(event);
    AtlasEntity   executionEntity = validateProcessExecution(processEntity, event);
    AtlasObjectId processRef      = toAtlasObjectId(executionEntity.getRelationshipAttribute(
            BaseHiveEvent.ATTRIBUTE_PROCESS));

    // The execution must point back at the process entity it belongs to.
    Assert.assertEquals(processRef.getGuid(), processEntity.getGuid());
    Assert.assertEquals(numberOfProcessExecutions(processEntity), 1);

    assertTableIsRegistered(DEFAULT_DB, targetTable);
}
 
Example 4
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testCreateView() throws Exception {
    // CREATE VIEW should register a process entity (table -> view lineage),
    // one process execution, and one DDL query on the view entity.
    String tableName = createTable();
    String viewName  = tableName();
    String query     = "create view " + viewName + " as select * from " + tableName;

    runCommand(query);

    HiveEventContext hiveEventContext = constructEvent(query, HiveOperation.CREATEVIEW, getInputs(tableName,
            Entity.Type.TABLE), getOutputs(viewName, Entity.Type.TABLE));
    AtlasEntity processEntity1 = validateProcess(hiveEventContext);
    AtlasEntity processExecutionEntity1 = validateProcessExecution(processEntity1, hiveEventContext);
    AtlasObjectId process1 = toAtlasObjectId(processExecutionEntity1.getRelationshipAttribute(
            BaseHiveEvent.ATTRIBUTE_PROCESS));
    Assert.assertEquals(process1.getGuid(), processEntity1.getGuid());
    Assert.assertEquals(numberOfProcessExecutions(processEntity1), 1);

    // Register/verify the view once and reuse the returned guid
    // (previously assertTableIsRegistered was invoked twice redundantly).
    String viewId          = assertTableIsRegistered(DEFAULT_DB, viewName);
    AtlasEntity viewEntity = atlasClientV2.getEntityByGuid(viewId).getEntity();
    List ddlQueries        = (List) viewEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);
}
 
Example 5
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testLoadLocalPath() throws Exception {
    // LOAD DATA LOCAL INPATH into an unpartitioned table should be captured
    // as a LOAD process with the table as its only output.
    String targetTable = createTable(false);
    String dataFile    = file("load");
    String query       = "load data local inpath 'file://" + dataFile + "' into table " + targetTable;

    String tableGuid = assertTableIsRegistered(DEFAULT_DB, targetTable);

    runCommand(query);

    AtlasEntity tableEntity = atlasClientV2.getEntityByGuid(tableGuid).getEntity();
    List ddlQueries         = (List) tableEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    // Only the original CREATE TABLE should be recorded as DDL.
    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);

    assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null, getOutputs(targetTable, Entity.Type.TABLE)));
}
 
Example 6
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testLoadLocalPathIntoPartition() throws Exception {
    // Same as testLoadLocalPath, but the LOAD targets a specific partition
    // of a partitioned table.
    String targetTable = createTable(true);
    String dataFile    = file("load");
    String query       = "load data local inpath 'file://" + dataFile + "' into table " + targetTable +  " partition(dt = '"+ PART_FILE + "')";

    String tableGuid = assertTableIsRegistered(DEFAULT_DB, targetTable);

    runCommand(query);

    AtlasEntity tableEntity = atlasClientV2.getEntityByGuid(tableGuid).getEntity();
    List ddlQueries         = (List) tableEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    // Only the original CREATE TABLE should be recorded as DDL.
    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);

    assertProcessIsRegistered(constructEvent(query, HiveOperation.LOAD, null, getOutputs(targetTable, Entity.Type.TABLE)));
}
 
Example 7
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testInsertIntoLocalDir() throws Exception {
    // INSERT OVERWRITE LOCAL DIRECTORY is a QUERY with a table input and no
    // registered output (local dirs are not tracked as outputs).
    String sourceTable = createTable();
    String localDir    = mkdir("hiverandom.tmp");
    String query       = "insert overwrite LOCAL DIRECTORY '" + localDir + "' select id, name from " + sourceTable;

    runCommand(query);

    HiveEventContext event = constructEvent(query,  HiveOperation.QUERY,
            getInputs(sourceTable, Entity.Type.TABLE), null);

    AtlasEntity   processEntity   = validateProcess(event);
    AtlasEntity   executionEntity = validateProcessExecution(processEntity, event);
    AtlasObjectId processRef      = toAtlasObjectId(executionEntity.getRelationshipAttribute(
            BaseHiveEvent.ATTRIBUTE_PROCESS));

    // The execution must point back at its process, and only one run exists.
    Assert.assertEquals(processRef.getGuid(), processEntity.getGuid());
    Assert.assertEquals(numberOfProcessExecutions(processEntity), 1);

    String tableGuid = assertTableIsRegistered(DEFAULT_DB, sourceTable);

    AtlasEntity tableEntity = atlasClientV2.getEntityByGuid(tableGuid).getEntity();
    List ddlQueries         = (List) tableEntity.getRelationshipAttribute(ATTRIBUTE_DDL_QUERIES);

    Assert.assertNotNull(ddlQueries);
    Assert.assertEquals(ddlQueries.size(), 1);
}
 
Example 8
Source Project: atlas   Source File: HiveITBase.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Decides whether the write type of {@code entity} should be appended to the
 * process qualified name. Only QUERY operations with a known write type
 * qualify; local-path writes are excluded because local paths are never
 * registered as outputs.
 */
protected static boolean addQueryType(HiveOperation op, WriteEntity entity) {
    // Guard clauses: non-QUERY ops and unknown write types never qualify.
    if (!HiveOperation.QUERY.equals(op) || entity.getWriteType() == null) {
        return false;
    }

    switch (entity.getWriteType()) {
        case INSERT:
        case INSERT_OVERWRITE:
        case UPDATE:
        case DELETE:
            return true;
        case PATH_WRITE:
            // Count DFS paths only; local dirs are not added as outputs.
            return !Entity.Type.LOCAL_DIR.equals(entity.getType());
        default:
            return false;
    }
}
 
Example 9
Source Project: incubator-atlas   Source File: HiveHook.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Appends each distinct input dataset to {@code buffer}, which accumulates the
 * process qualified name. Inputs are de-duplicated case-insensitively by name;
 * DFS/local directory inputs are skipped when {@code ignoreHDFSPathsInQFName}
 * is set. For tables/partitions the table create time is included so a
 * re-created table yields a different qualified name.
 *
 * No-op when {@code refs} or {@code sortedInputs} is null.
 */
private static void addInputs(HiveMetaStoreBridge hiveBridge, HiveOperation op, SortedSet<ReadEntity> sortedInputs, StringBuilder buffer, final Map<ReadEntity, Referenceable> refs, final boolean ignoreHDFSPathsInQFName) throws HiveException {
    if (refs != null) {
        if (sortedInputs != null) {
            // Tracks lower-cased names already emitted, preserving insertion order.
            Set<String> dataSetsProcessed = new LinkedHashSet<>();
            for (Entity input : sortedInputs) {

                if (!dataSetsProcessed.contains(input.getName().toLowerCase())) {
                    //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations
                    if (ignoreHDFSPathsInQFName &&
                        (Type.DFS_DIR.equals(input.getType()) || Type.LOCAL_DIR.equals(input.getType()))) {
                        LOG.debug("Skipping dfs dir input addition to process qualified name {} ", input.getName());
                    } else if (refs.containsKey(input)) {
                        if ( input.getType() == Type.PARTITION || input.getType() == Type.TABLE) {
                            // Re-fetch the table so the create time reflects the current metastore state.
                            final Date createTime = HiveMetaStoreBridge.getTableCreatedTime(hiveBridge.hiveClient.getTable(input.getTable().getDbName(), input.getTable().getTableName()));
                            addDataset(buffer, refs.get(input), createTime.getTime());
                        } else {
                            addDataset(buffer, refs.get(input));
                        }
                    }
                    // Mark the name processed even when skipped, so duplicates are not re-examined.
                    dataSetsProcessed.add(input.getName().toLowerCase());
                }
            }

        }
    }
}
 
Example 10
Source Project: incubator-atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a single-element set of {@link ReadEntity} for use as the inputs of a
 * synthetic Hive event. DFS directory names are lower-cased paths; everything
 * else uses the qualified table name. For TABLE inputs the Hive table object
 * is attached.
 *
 * @param inputName  table name or directory path
 * @param entityType type to assign to the entity
 * @return mutable, insertion-ordered set containing exactly one entity
 * @throws HiveException if the table lookup fails
 */
private Set<ReadEntity> getInputs(String inputName, Entity.Type entityType) throws HiveException {
    final ReadEntity entity = new ReadEntity();

    if (Entity.Type.DFS_DIR.equals(entityType)) {
        entity.setName(lower(new Path(inputName).toString()));
        entity.setTyp(Entity.Type.DFS_DIR);
    } else {
        entity.setName(getQualifiedTblName(inputName));
        entity.setTyp(entityType);
    }

    if (entityType == Entity.Type.TABLE) {
        entity.setT(hiveMetaStoreBridge.hiveClient.getTable(DEFAULT_DB, inputName));
    }

    // Plain construction instead of double-brace initialization, which creates
    // an anonymous inner class that captures the enclosing test instance.
    Set<ReadEntity> inputs = new LinkedHashSet<>();
    inputs.add(entity);
    return inputs;
}
 
Example 11
Source Project: incubator-atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a single-element set of {@link WriteEntity} for use as the outputs of
 * a synthetic Hive event. DFS/local directory names are lower-cased paths;
 * everything else uses the qualified table name. For TABLE outputs the Hive
 * table object is attached.
 *
 * @param inputName  table name or directory path
 * @param entityType type to assign to the entity
 * @return mutable, insertion-ordered set containing exactly one entity
 * @throws HiveException if the table lookup fails
 */
private Set<WriteEntity> getOutputs(String inputName, Entity.Type entityType) throws HiveException {
    final WriteEntity entity = new WriteEntity();

    if (Entity.Type.DFS_DIR.equals(entityType) || Entity.Type.LOCAL_DIR.equals(entityType)) {
        entity.setName(lower(new Path(inputName).toString()));
        entity.setTyp(entityType);
    } else {
        entity.setName(getQualifiedTblName(inputName));
        entity.setTyp(entityType);
    }

    if (entityType == Entity.Type.TABLE) {
        entity.setT(hiveMetaStoreBridge.hiveClient.getTable(DEFAULT_DB, inputName));
    }

    // Plain construction instead of double-brace initialization, which creates
    // an anonymous inner class that captures the enclosing test instance.
    Set<WriteEntity> outputs = new LinkedHashSet<>();
    outputs.add(entity);
    return outputs;
}
 
Example 12
Source Project: incubator-atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test(enabled = false)
public void testInsertIntoTempTable() throws Exception {
    // Insert from a regular table into a temporary table; the temp table must
    // not be registered under its plain name.
    String sourceTable = createTable();
    String tempTable   = createTable(false, false, true);

    assertTableIsRegistered(DEFAULT_DB, sourceTable);
    assertTableIsNotRegistered(DEFAULT_DB, tempTable, true);

    String query =
        "insert into " + tempTable + " select id, name from " + sourceTable;

    runCommand(query);

    Set<ReadEntity>  inputs  = getInputs(sourceTable, Entity.Type.TABLE);
    Set<WriteEntity> outputs = getOutputs(tempTable, Entity.Type.TABLE);

    // Temp tables are qualified with a session-id suffix; patch the sole output.
    WriteEntity tempOutput = outputs.iterator().next();
    tempOutput.setName(getQualifiedTblName(tempTable + HiveMetaStoreBridge.TEMP_TABLE_PREFIX + SessionState.get().getSessionId()));
    tempOutput.setWriteType(WriteEntity.WriteType.INSERT);

    validateProcess(constructEvent(query,  HiveOperation.QUERY, inputs, outputs));

    assertTableIsRegistered(DEFAULT_DB, sourceTable);
    assertTableIsRegistered(DEFAULT_DB, tempTable, null, true);
}
 
Example 13
Source Project: incubator-atlas   Source File: HiveHookIT.java    License: Apache License 2.0 6 votes vote down vote up
@Test
public void testTruncateTable() throws Exception {
    // TRUNCATE TABLE should register a process with the table as output and
    // leave the table with no lineage inputs.
    String targetTable = createTable(false);
    String query       = String.format("truncate table %s", targetTable);

    runCommand(query);

    Set<WriteEntity> outputs = getOutputs(targetTable, Entity.Type.TABLE);
    String           tableId = assertTableIsRegistered(DEFAULT_DB, targetTable);

    validateProcess(constructEvent(query, HiveOperation.TRUNCATETABLE, null, outputs));

    //Check lineage
    String datasetName  = HiveMetaStoreBridge.getTableQualifiedName(CLUSTER_NAME, DEFAULT_DB, targetTable);
    JSONObject response = atlasClient.getInputGraph(datasetName);
    JSONObject vertices = response.getJSONObject("values").getJSONObject("vertices");

    //Below should be assertTrue - Fix https://issues.apache.org/jira/browse/ATLAS-653
    Assert.assertFalse(vertices.has(tableId));
}
 
Example 14
Source Project: atlas   Source File: CreateTable.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Atlas entities for a CREATE TABLE event. Scans the outputs for
 * the first non-temporary table (refreshed from the metastore) and processes
 * it; temporary tables are skipped.
 */
public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception {
    AtlasEntitiesWithExtInfo entities       = new AtlasEntitiesWithExtInfo();
    Table                    tableToProcess = null;

    if (CollectionUtils.isNotEmpty(getOutputs())) {
        for (Entity output : getOutputs()) {
            if (output.getType() != Entity.Type.TABLE) {
                continue;
            }

            Table candidate = output.getTable();

            if (candidate == null) {
                continue;
            }

            // Re-read from the metastore for the authoritative definition.
            candidate = getHive().getTable(candidate.getDbName(), candidate.getTableName());

            if (candidate == null || skipTemporaryTable(candidate)) {
                continue; // missing or temporary — keep scanning
            }

            tableToProcess = candidate;
            break;
        }
    }

    processTable(tableToProcess, entities);

    addProcessedEntities(entities);

    return entities;
}
 
Example 15
Source Project: atlas   Source File: CreateDatabase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds the Atlas entities for a CREATE DATABASE event: one hive_db entity
 * per database output (refreshed from the metastore), plus an optional DDL
 * entity for each.
 */
public AtlasEntitiesWithExtInfo getHiveEntities() throws Exception {
    AtlasEntitiesWithExtInfo entities = new AtlasEntitiesWithExtInfo();

    for (Entity output : getOutputs()) {
        if (output.getType() != DATABASE) {
            continue;
        }

        // Re-read from the metastore for the authoritative definition.
        Database db = output.getDatabase();

        if (db != null) {
            db = getHive().getDatabase(getDatabaseName(db));
        }

        if (db == null) {
            LOG.error("CreateDatabase.getEntities(): failed to retrieve db");
            continue;
        }

        AtlasEntity dbEntity    = toDbEntity(db);
        AtlasEntity dbDDLEntity = createHiveDDLEntity(dbEntity);

        entities.addEntity(dbEntity);

        if (dbDDLEntity != null) {
            entities.addEntity(dbDDLEntity);
        }
    }

    addProcessedEntities(entities);

    return entities;
}
 
Example 16
Source Project: atlas   Source File: CreateHiveProcess.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Decides whether this hook event should produce no process entity:
 * either nothing was read or written, or a single-output QUERY is a plain
 * SELECT into a temporary staging dir, or a DELETE/UPDATE (which cannot
 * form a lineage edge).
 */
private boolean skipProcess() {
    Set<ReadEntity>  inputs  = getInputs();
    Set<WriteEntity> outputs = getOutputs();

    // Nothing touched at all — nothing to record.
    if (CollectionUtils.isEmpty(inputs) && CollectionUtils.isEmpty(outputs)) {
        return true;
    }

    // Only single-output QUERY operations are candidates for skipping.
    if (getContext().getHiveOperation() != HiveOperation.QUERY || outputs.size() != 1) {
        return false;
    }

    WriteEntity soleOutput = outputs.iterator().next();

    // A SELECT writing into a temporary DFS/local path leaves no lineage.
    boolean tempPathWrite = (soleOutput.getType() == Entity.Type.DFS_DIR || soleOutput.getType() == Entity.Type.LOCAL_DIR)
            && soleOutput.getWriteType() == WriteEntity.WriteType.PATH_WRITE
            && soleOutput.isTempURI();

    // DELETE and UPDATE initially have one input and one output.
    // Since they do not support sub-query, they won't create a lineage that have one input and one output. (One input only)
    // It's safe to filter them out here.
    boolean deleteOrUpdate = soleOutput.getWriteType() == WriteEntity.WriteType.DELETE
            || soleOutput.getWriteType() == WriteEntity.WriteType.UPDATE;

    return tempPathWrite || deleteOrUpdate;
}
 
Example 17
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a single-element set of {@link ReadEntity} for a synthetic Hive
 * event. DFS directory names use the lower-cased path; everything else uses
 * the qualified table name. TABLE inputs get the Hive table object attached.
 *
 * @param inputName  table name or directory path
 * @param entityType type to assign to the entity
 * @return mutable, insertion-ordered set containing exactly one entity
 * @throws HiveException if the table lookup fails
 */
private Set<ReadEntity> getInputs(String inputName, Entity.Type entityType) throws HiveException {
    final ReadEntity entity;

    if (Entity.Type.DFS_DIR.equals(entityType)) {
        entity = new TestReadEntity(lower(new Path(inputName).toString()), entityType);
    } else {
        entity = new TestReadEntity(getQualifiedTblName(inputName), entityType);
    }

    if (entityType == Entity.Type.TABLE) {
        entity.setT(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, inputName));
    }

    // Plain construction instead of double-brace initialization, which creates
    // an anonymous inner class that captures the enclosing test instance.
    Set<ReadEntity> inputs = new LinkedHashSet<>();
    inputs.add(entity);
    return inputs;
}
 
Example 18
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds a single-element set of {@link WriteEntity} for a synthetic Hive
 * event. DFS/local directory names use the lower-cased path; everything else
 * uses the qualified table name. TABLE outputs get the Hive table object
 * attached.
 *
 * @param inputName  table name or directory path
 * @param entityType type to assign to the entity
 * @return mutable, insertion-ordered set containing exactly one entity
 * @throws HiveException if the table lookup fails
 */
private Set<WriteEntity> getOutputs(String inputName, Entity.Type entityType) throws HiveException {
    final WriteEntity entity;

    if (Entity.Type.DFS_DIR.equals(entityType) || Entity.Type.LOCAL_DIR.equals(entityType)) {
        entity = new TestWriteEntity(lower(new Path(inputName).toString()), entityType);
    } else {
        entity = new TestWriteEntity(getQualifiedTblName(inputName), entityType);
    }

    if (entityType == Entity.Type.TABLE) {
        entity.setT(hiveMetaStoreBridge.getHiveClient().getTable(DEFAULT_DB, inputName));
    }

    // Plain construction instead of double-brace initialization, which creates
    // an anonymous inner class that captures the enclosing test instance.
    Set<WriteEntity> outputs = new LinkedHashSet<>();
    outputs.add(entity);
    return outputs;
}
 
Example 19
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Asserts that the registered table ids correspond to the expected Hive
 * entities: same count, and every registered qualified name starts with some
 * expected table name. Either argument may be null, in which case the other
 * must be empty.
 */
private void validateTables(List<AtlasObjectId> tableIds, Set<? extends Entity> expectedTables) throws Exception {
    if (tableIds == null) {
        Assert.assertTrue(CollectionUtils.isEmpty(expectedTables));
        return;
    }

    if (expectedTables == null) {
        Assert.assertTrue(CollectionUtils.isEmpty(tableIds));
        return;
    }

    Assert.assertEquals(tableIds.size(), expectedTables.size());

    // Resolve each registered id to its qualified name via the Atlas client.
    List<String> registeredNames = new ArrayList<>(tableIds.size());

    for (AtlasObjectId tableId : tableIds) {
        AtlasEntity atlasEntity = atlasClientV2.getEntityByGuid(tableId.getGuid()).getEntity();

        registeredNames.add((String) atlasEntity.getAttribute(ATTRIBUTE_QUALIFIED_NAME));
    }

    // Plain names of the expected Hive-side entities.
    List<String> expectedNames = new ArrayList<>(expectedTables.size());

    for (Entity expected : expectedTables) {
        expectedNames.add(expected.getName());
    }

    // Every registered qualified name must be prefixed by an expected name.
    for (String registeredName : registeredNames) {
        boolean matched = false;

        for (String expectedName : expectedNames) {
            if (registeredName.startsWith(expectedName)) {
                matched = true;
                break;
            }
        }

        assertTrue(matched, "Table name '" + registeredName + "' does not start with any name in the expected list " + expectedNames);
    }
}
 
Example 20
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
@Test(enabled = false)
public void testInsertIntoTempTable() throws Exception {
    // Insert from a regular table into a temporary table; the temp table must
    // not be registered under its plain name, but the process is recorded.
    String sourceTable = createTable();
    String tempTable   = createTable(false, false, true);

    assertTableIsRegistered(DEFAULT_DB, sourceTable);
    assertTableIsNotRegistered(DEFAULT_DB, tempTable, true);

    String query = "insert into " + tempTable + " select id, name from " + sourceTable;

    runCommand(query);

    Set<ReadEntity>  inputs  = getInputs(sourceTable, Entity.Type.TABLE);
    Set<WriteEntity> outputs = getOutputs(tempTable, Entity.Type.TABLE);

    outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);

    HiveEventContext event = constructEvent(query,  HiveOperation.QUERY, inputs, outputs);

    AtlasEntity   processEntity   = validateProcess(event);
    AtlasEntity   executionEntity = validateProcessExecution(processEntity, event);
    AtlasObjectId processRef      = toAtlasObjectId(executionEntity.getRelationshipAttribute(
            BaseHiveEvent.ATTRIBUTE_PROCESS));

    // The execution must point back at its process, and only one run exists.
    Assert.assertEquals(processRef.getGuid(), processEntity.getGuid());
    Assert.assertEquals(numberOfProcessExecutions(processEntity), 1);

    assertTableIsRegistered(DEFAULT_DB, sourceTable);
    assertTableIsRegistered(DEFAULT_DB, tempTable, null, true);
}
 
Example 21
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testIgnoreTruncateTable() throws Exception {
    // TRUNCATE TABLE must not register a process entity, though the table
    // itself remains registered.
    String targetTable = createTable(false);
    String query       = String.format("truncate table %s", targetTable);

    runCommand(query);

    Set<WriteEntity> outputs = getOutputs(targetTable, Entity.Type.TABLE);
    HiveEventContext event   = constructEvent(query, HiveOperation.TRUNCATETABLE, null, outputs);

    assertTableIsRegistered(DEFAULT_DB, targetTable);
    assertProcessIsNotRegistered(event);
}
 
Example 22
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testAlterDBProperties() throws Exception {
    // Exercise set/unset property handling at the database level.
    String database      = createDatabase();
    String queryTemplate = "alter database %s %s DBPROPERTIES (%s)";

    testAlterProperties(Entity.Type.DATABASE, database, queryTemplate);
}
 
Example 23
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testAlterTableProperties() throws Exception {
    // Exercise set/unset property handling at the table level.
    String table         = createTable();
    String queryTemplate = "alter table %s %s TBLPROPERTIES (%s)";

    testAlterProperties(Entity.Type.TABLE, table, queryTemplate);
}
 
Example 24
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Shared driver for ALTER ... SET/UNSET property tests: sets an initial
 * property map, adds one more property, then (for non-database entities)
 * unsets them all, verifying the Atlas entity after each step.
 */
private void testAlterProperties(Entity.Type entityType, String entityName, String fmtQuery) throws Exception {
    String setOp   = "set";
    String unsetOp = "unset";

    Map<String, String> expectedProps = new HashMap<>();
    expectedProps.put("testPropKey1", "testPropValue1");
    expectedProps.put("comment", "test comment");

    // Initial SET of two properties.
    String query = String.format(fmtQuery, entityName, setOp, getSerializedProps(expectedProps));

    runCommandWithDelay(query, 3000);

    verifyEntityProperties(entityType, entityName, expectedProps, false);

    //Add another property
    expectedProps.put("testPropKey2", "testPropValue2");

    query = String.format(fmtQuery, entityName, setOp, getSerializedProps(expectedProps));

    runCommandWithDelay(query, 3000);

    verifyEntityProperties(entityType, entityName, expectedProps, false);

    if (entityType != Entity.Type.DATABASE) {
        //Database unset properties doesnt work - alter database %s unset DBPROPERTIES doesnt work
        //Unset all the props
        StringBuilder quoted = new StringBuilder("'");

        query = String.format(fmtQuery, entityName, unsetOp, Joiner.on("','").skipNulls().appendTo(quoted, expectedProps.keySet()).append('\''));

        runCommandWithDelay(query, 3000);

        verifyEntityProperties(entityType, entityName, expectedProps, true);
    }
}
 
Example 25
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
@Test
public void testAlterViewProperties() throws Exception {
    // Views share the table property-handling path; create one and reuse the
    // common alter-properties driver.
    String backingTable = createTable();
    String view         = tableName();
    String createQuery  = "create view " + view + " as select * from " + backingTable;

    runCommand(createQuery);

    String queryTemplate = "alter view %s %s TBLPROPERTIES (%s)";

    testAlterProperties(Entity.Type.TABLE, view, queryTemplate);
}
 
Example 26
Source Project: atlas   Source File: HiveHookIT.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Maps each input entity to a reference AtlasEntity (keyed by qualified name)
 * in a map sorted by {@code entityComparator}. Returns an empty map when
 * {@code inputTbls} is null.
 *
 * @param inputTbls entities to wrap; may be null
 * @return sorted map from input entity to its AtlasEntity reference
 */
private <T extends Entity> SortedMap<T, AtlasEntity> getSortedProcessDataSets(Set<T> inputTbls) {
    SortedMap<T, AtlasEntity> inputs = new TreeMap<>(entityComparator);

    if (inputTbls != null) {
        for (T tbl : inputTbls) {
            // Plain HashMap instead of double-brace initialization, which
            // creates an anonymous inner class capturing the enclosing instance.
            Map<String, Object> attributes = new HashMap<>();
            attributes.put(ATTRIBUTE_QUALIFIED_NAME, tbl.getName());

            inputs.put(tbl, new AtlasEntity(getDSTypeName(tbl), attributes));
        }
    }
    return inputs;
}
 
Example 27
Source Project: atlas   Source File: HiveITBase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Computes the qualified name of the Hive process entity for an event.
 *
 * For create-style operations it is derived from the (refreshed) output
 * table alone; otherwise it is built as
 * {@code <operationName><inputs>IO_SEP<outputs>}, optionally omitting
 * HDFS/local paths from the name.
 *
 * @return the process qualified name
 * @throws HiveException if a table refresh against the metastore fails
 */
@VisibleForTesting
protected static String getProcessQualifiedName(HiveMetaStoreBridge dgiBridge, HiveEventContext eventContext,
                                      final SortedSet<ReadEntity> sortedHiveInputs,
                                      final SortedSet<WriteEntity> sortedHiveOutputs,
                                      SortedMap<ReadEntity, AtlasEntity> hiveInputsMap,
                                      SortedMap<WriteEntity, AtlasEntity> hiveOutputsMap) throws HiveException {
    HiveOperation op = eventContext.getOperation();
    if (isCreateOp(eventContext)) {
        // Create ops are named after the output table only, not the I/O sets.
        Entity entity = getEntityByType(sortedHiveOutputs, Entity.Type.TABLE);

        if (entity != null) {
            Table outTable = entity.getTable();
            //refresh table
            outTable = dgiBridge.getHiveClient().getTable(outTable.getDbName(), outTable.getTableName());
            return HiveMetaStoreBridge.getTableProcessQualifiedName(dgiBridge.getMetadataNamespace(), outTable);
        }
    }

    StringBuilder buffer = new StringBuilder(op.getOperationName());

    // Whether dfs/local dir entities should be left out of the name.
    boolean ignoreHDFSPathsinQFName = ignoreHDFSPathsinQFName(op, sortedHiveInputs, sortedHiveOutputs);
    if ( ignoreHDFSPathsinQFName && LOG.isDebugEnabled()) {
        LOG.debug("Ignoring HDFS paths in qualifiedName for {} {} ", op, eventContext.getQueryStr());
    }

    // Name layout: <op><inputs>IO_SEP<outputs>.
    addInputs(dgiBridge, op, sortedHiveInputs, buffer, hiveInputsMap, ignoreHDFSPathsinQFName);
    buffer.append(IO_SEP);
    addOutputs(dgiBridge, op, sortedHiveOutputs, buffer, hiveOutputsMap, ignoreHDFSPathsinQFName);
    LOG.info("Setting process qualified name to {}", buffer);
    return buffer.toString();
}
 
Example 28
Source Project: atlas   Source File: HiveITBase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the first entity in {@code entities} whose type matches
 * {@code entityType}, or null when there is no match.
 */
protected static Entity getEntityByType(Set<? extends Entity> entities, Entity.Type entityType) {
    for (Entity candidate : entities) {
        if (candidate.getType() != entityType) {
            continue;
        }
        return candidate;
    }
    return null;
}
 
Example 29
Source Project: atlas   Source File: HiveITBase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns true when any entity in the set is a PARTITION, i.e. the query
 * operates at partition granularity.
 */
protected static boolean isPartitionBasedQuery(Set<? extends Entity> entities) {
    for (Entity candidate : entities) {
        if (Entity.Type.PARTITION.equals(candidate.getType())) {
            return true;
        }
    }

    return false;
}
 
Example 30
Source Project: atlas   Source File: HiveITBase.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Appends each distinct input dataset to {@code buffer}, which accumulates the
 * process qualified name. Inputs are de-duplicated case-insensitively by name;
 * DFS/local directory inputs are skipped when {@code ignoreHDFSPathsInQFName}
 * is set. For tables/partitions the table create time is included so a
 * re-created table yields a different qualified name.
 *
 * No-op when {@code refs} or {@code sortedInputs} is null.
 */
protected static void addInputs(HiveMetaStoreBridge hiveBridge, HiveOperation op, SortedSet<ReadEntity> sortedInputs, StringBuilder buffer, final Map<ReadEntity, AtlasEntity> refs, final boolean ignoreHDFSPathsInQFName) throws HiveException {
    if (refs != null) {
        if (sortedInputs != null) {
            // Tracks lower-cased names already emitted, preserving insertion order.
            Set<String> dataSetsProcessed = new LinkedHashSet<>();
            for (Entity input : sortedInputs) {

                if (!dataSetsProcessed.contains(input.getName().toLowerCase())) {
                    //HiveOperation.QUERY type encompasses INSERT, INSERT_OVERWRITE, UPDATE, DELETE, PATH_WRITE operations
                    if (ignoreHDFSPathsInQFName &&
                            (Entity.Type.DFS_DIR.equals(input.getType()) || Entity.Type.LOCAL_DIR.equals(input.getType()))) {
                        LOG.debug("Skipping dfs dir input addition to process qualified name {} ", input.getName());
                    } else if (refs.containsKey(input)) {
                        if ( input.getType() == Entity.Type.PARTITION || input.getType() == Entity.Type.TABLE) {
                            // Re-fetch the table so the create time reflects the current metastore state.
                            Table inputTable = refreshTable(hiveBridge, input.getTable().getDbName(), input.getTable().getTableName());

                            if (inputTable != null) {
                                addDataset(buffer, refs.get(input), HiveMetaStoreBridge.getTableCreatedTime(inputTable));
                            }
                        } else {
                            addDataset(buffer, refs.get(input));
                        }
                    }

                    // Mark the name processed even when skipped, so duplicates are not re-examined.
                    dataSetsProcessed.add(input.getName().toLowerCase());
                }
            }

        }
    }
}