Java Code Examples for org.apache.kylin.common.KylinConfig#getHiveIntermediateTablePrefix()

The following examples show how to use org.apache.kylin.common.KylinConfig#getHiveIntermediateTablePrefix(). You can vote up the examples you find useful or vote down those you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: ITDoggedCubeBuilderStressTest.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void before() throws IOException {
    staticCreateTestMetadata();

    // Resolve the test cube and the flat-table CSV that backs it.
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    CubeManager manager = CubeManager.getInstance(config);
    cube = manager.getCube("ssb");

    String tablePrefix = config.getHiveIntermediateTablePrefix();
    flatTable = LOCALMETA_TEST_DATA + "/data/" + tablePrefix + "ssb_19920101000000_19920201000000.csv";

    dictionaryMap = ITInMemCubeBuilderTest.getDictionaryMap(cube, flatTable);
}
 
Example 2
Source File: ITDoggedCubeBuilderTest.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void before() throws IOException {
    staticCreateTestMetadata();

    // Look up the "ssb" test cube and the CSV flat table it was built from.
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    cube = CubeManager.getInstance(config).getCube("ssb");

    String tablePrefix = config.getHiveIntermediateTablePrefix();
    flatTable = LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/data/" + tablePrefix
            + "ssb_19920101000000_19920201000000.csv";

    dictionaryMap = ITInMemCubeBuilderTest.getDictionaryMap(cube, flatTable);
}
 
Example 3
Source File: ITDoggedCubeBuilderStressTest.java    From kylin with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void before() throws IOException {
    staticCreateTestMetadata();

    // Fetch the cube under test and locate its flat-table fixture.
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    CubeManager manager = CubeManager.getInstance(config);
    cube = manager.getCube("ssb");

    String tablePrefix = config.getHiveIntermediateTablePrefix();
    flatTable = LOCALMETA_TEST_DATA + "/data/" + tablePrefix + "ssb_19920101000000_19920201000000.csv";

    dictionaryMap = ITInMemCubeBuilderTest.getDictionaryMap(cube, flatTable);
}
 
Example 4
Source File: ITDoggedCubeBuilderTest.java    From kylin with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public static void before() throws IOException {
    staticCreateTestMetadata();

    // Resolve the "ssb" cube and the CSV flat-table fixture used to build dictionaries.
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    cube = CubeManager.getInstance(config).getCube("ssb");

    String tablePrefix = config.getHiveIntermediateTablePrefix();
    flatTable = LocalFileMetadataTestCase.LOCALMETA_TEST_DATA + "/data/" + tablePrefix
            + "ssb_19920101000000_19920201000000.csv";

    dictionaryMap = ITInMemCubeBuilderTest.getDictionaryMap(cube, flatTable);
}
 
Example 5
Source File: StorageCleanupJob.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Finds intermediate Hive tables (named with the configured prefix plus a job UUID)
 * whose owning job has reached a final state, and either drops them ({@code delete})
 * or prints the candidate list. With {@code force} set, every prefix-matching table
 * is dropped regardless of job state.
 *
 * @param conf Hadoop configuration (currently unused by this method)
 * @throws IOException if reading the hive command output fails
 */
private void cleanUnusedIntermediateHiveTable(Configuration conf) throws IOException {
    final KylinConfig config = KylinConfig.getInstanceFromEnv();
    final CliCommandExecutor cmdExec = config.getCliCommandExecutor();
    final int uuidLength = 36;
    final String preFix = config.getHiveIntermediateTablePrefix();
    // Compiled once; table names carry the UUID with '_' in place of '-', normalized below.
    final Pattern uuidPattern = Pattern
            .compile("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}");

    final String useDatabaseHql = "USE " + config.getHiveDatabaseForIntermediateTable() + ";";
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.addStatement(useDatabaseHql);
    hiveCmdBuilder.addStatement("show tables " + "\'" + preFix + "*\'" + "; ");

    Pair<Integer, String> result = cmdExec.execute(hiveCmdBuilder.build());

    // Jobs that are still running/pending; their intermediate tables must be kept.
    List<String> workingJobList = new ArrayList<String>();
    for (String jobId : executableManager.getAllJobIds()) {
        // only remove FINISHED and DISCARDED job intermediate table
        final ExecutableState state = executableManager.getOutput(jobId).getState();
        if (!state.isFinalState()) {
            workingJobList.add(jobId);
        }
    }
    logger.info("Working jobIDs: " + workingJobList);

    List<String> allHiveTablesNeedToBeDeleted = new ArrayList<String>();
    // try-with-resources so the reader is closed even if parsing throws.
    try (BufferedReader reader = new BufferedReader(new StringReader(result.getSecond()))) {
        String line;
        while ((line = reader.readLine()) != null) {

            logger.info("Checking table " + line);

            if (!line.startsWith(preFix))
                continue;

            if (force) {
                logger.warn("!!!!!!!!!!!!!!!Warning: will delete all intermediate hive tables!!!!!!!!!!!!!!!!!!!!!!");
                allHiveTablesNeedToBeDeleted.add(line);
                continue;
            }

            boolean isNeedDel = true;

            if (line.length() > preFix.length() + uuidLength) {
                // The job UUID is the last 36 characters of the table name.
                String uuid = line.substring(line.length() - uuidLength);
                uuid = uuid.replace("_", "-");
                if (uuidPattern.matcher(uuid).matches()) {
                    // Check whether it's a hive table in use by a non-final job.
                    if (isTableInUse(uuid, workingJobList)) {
                        logger.info("Skip because the table is in use by a working job");
                        isNeedDel = false;
                    }
                } else {
                    logger.info("Skip because not match pattern");
                    isNeedDel = false;
                }
            } else {
                logger.info("Skip because length not qualified");
                isNeedDel = false;
            }

            if (isNeedDel) {
                allHiveTablesNeedToBeDeleted.add(line);
            }
        }
    }

    if (delete) {
        hiveCmdBuilder.reset();
        hiveCmdBuilder.addStatement(useDatabaseHql);
        for (String delHive : allHiveTablesNeedToBeDeleted) {
            hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
            logger.info("Remove " + delHive + " from hive tables.");
        }

        try {
            cmdExec.execute(hiveCmdBuilder.build());
        } catch (IOException e) {
            // Log with cause instead of printStackTrace; cleanup remains best-effort.
            logger.error("Failed to drop unused intermediate hive tables", e);
        }
    } else {
        System.out.println("------ Intermediate Hive Tables To Be Dropped ------");
        for (String hiveTable : allHiveTablesNeedToBeDeleted) {
            System.out.println(hiveTable);
        }
        System.out.println("----------------------------------------------------");
    }
}
 
Example 6
Source File: StorageCleanupJob.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Finds intermediate Hive tables (named with the configured prefix plus a job UUID)
 * whose owning job has reached a final state, and either drops them ({@code delete})
 * or prints the candidate list. With {@code force} set, every prefix-matching table
 * is dropped regardless of job state.
 *
 * @param conf Hadoop configuration (currently unused by this method)
 * @throws IOException if reading the hive command output fails
 */
private void cleanUnusedIntermediateHiveTable(Configuration conf) throws IOException {
    final KylinConfig config = KylinConfig.getInstanceFromEnv();
    final CliCommandExecutor cmdExec = config.getCliCommandExecutor();
    final int uuidLength = 36;
    final String preFix = config.getHiveIntermediateTablePrefix();
    // Compiled once; table names carry the UUID with '_' in place of '-', normalized below.
    final Pattern uuidPattern = Pattern
            .compile("[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}");

    final String useDatabaseHql = "USE " + config.getHiveDatabaseForIntermediateTable() + ";";
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.addStatement(useDatabaseHql);
    hiveCmdBuilder.addStatement("show tables " + "\'" + preFix + "*\'" + "; ");

    Pair<Integer, String> result = cmdExec.execute(hiveCmdBuilder.build());

    // Jobs that are still running/pending; their intermediate tables must be kept.
    List<String> workingJobList = new ArrayList<String>();
    for (String jobId : executableManager.getAllJobIds()) {
        // only remove FINISHED and DISCARDED job intermediate table
        final ExecutableState state = executableManager.getOutput(jobId).getState();
        if (!state.isFinalState()) {
            workingJobList.add(jobId);
        }
    }
    logger.info("Working jobIDs: " + workingJobList);

    List<String> allHiveTablesNeedToBeDeleted = new ArrayList<String>();
    // try-with-resources so the reader is closed even if parsing throws.
    try (BufferedReader reader = new BufferedReader(new StringReader(result.getSecond()))) {
        String line;
        while ((line = reader.readLine()) != null) {

            logger.info("Checking table " + line);

            if (!line.startsWith(preFix))
                continue;

            if (force) {
                logger.warn("!!!!!!!!!!!!!!!Warning: will delete all intermediate hive tables!!!!!!!!!!!!!!!!!!!!!!");
                allHiveTablesNeedToBeDeleted.add(line);
                continue;
            }

            boolean isNeedDel = true;

            if (line.length() > preFix.length() + uuidLength) {
                // The job UUID is the last 36 characters of the table name.
                String uuid = line.substring(line.length() - uuidLength);
                uuid = uuid.replace("_", "-");
                if (uuidPattern.matcher(uuid).matches()) {
                    // Check whether it's a hive table in use by a non-final job.
                    if (isTableInUse(uuid, workingJobList)) {
                        logger.info("Skip because the table is in use by a working job");
                        isNeedDel = false;
                    }
                } else {
                    logger.info("Skip because not match pattern");
                    isNeedDel = false;
                }
            } else {
                logger.info("Skip because length not qualified");
                isNeedDel = false;
            }

            if (isNeedDel) {
                allHiveTablesNeedToBeDeleted.add(line);
            }
        }
    }

    if (delete) {
        hiveCmdBuilder.reset();
        hiveCmdBuilder.addStatement(useDatabaseHql);
        for (String delHive : allHiveTablesNeedToBeDeleted) {
            hiveCmdBuilder.addStatement("drop table if exists " + delHive + "; ");
            logger.info("Remove " + delHive + " from hive tables.");
        }

        try {
            cmdExec.execute(hiveCmdBuilder.build());
        } catch (IOException e) {
            // Log with cause instead of printStackTrace; cleanup remains best-effort.
            logger.error("Failed to drop unused intermediate hive tables", e);
        }
    } else {
        System.out.println("------ Intermediate Hive Tables To Be Dropped ------");
        for (String hiveTable : allHiveTablesNeedToBeDeleted) {
            System.out.println(hiveTable);
        }
        System.out.println("----------------------------------------------------");
    }
}