Java Code Examples for org.apache.kylin.cube.CubeSegment#getLastBuildJobID()

The following examples show how to use org.apache.kylin.cube.CubeSegment#getLastBuildJobID() . You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source File: Coordinator.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the given cube is currently undergoing optimization, in which
 * case building new segments is not allowed.
 *
 * @param cube the cube instance to inspect
 * @return {@code true} if the cube has READY_PENDING segments, or if any NEW
 *         segment's last build job is an OPTIMIZE cubing job; {@code false} otherwise
 */
private boolean isInOptimize(CubeInstance cube) {
    Segments<CubeSegment> readyPendingSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
    if (!readyPendingSegments.isEmpty()) {
        logger.info("The cube {} has READY_PENDING segments {}. It's not allowed for building", cube.getName(),
                readyPendingSegments);
        return true;
    }
    Segments<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.NEW);
    for (CubeSegment newSegment : newSegments) {
        String jobId = newSegment.getLastBuildJobID();
        if (jobId == null) {
            continue; // segment has no recorded build job; nothing to check
        }
        AbstractExecutable job = getExecutableManager().getJob(jobId);
        // instanceof is null-safe, so a separate null check is redundant
        if (job instanceof CubingJob) {
            CubingJob cubingJob = (CubingJob) job;
            if (CubingJob.CubingJobTypeEnum.OPTIMIZE.toString().equals(cubingJob.getJobType())) {
                logger.info(
                        "The cube {} is in optimization. It's not allowed to build new segments during optimization.",
                        cube.getName());
                return true;
            }
        }
    }
    return false;
}
 
Example 2
Source File: BuildJobSubmitter.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the given cube is currently undergoing optimization, in which
 * case building new segments is not allowed.
 *
 * @param cube the cube instance to inspect
 * @return {@code true} if the cube has READY_PENDING segments, or if any NEW
 *         segment's last build job is an OPTIMIZE cubing job; {@code false} otherwise
 */
private boolean isInOptimize(CubeInstance cube) {
    Segments<CubeSegment> readyPendingSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
    if (!readyPendingSegments.isEmpty()) {
        logger.info("The cube {} has READY_PENDING segments {}. It's not allowed for building",
            cube.getName(), readyPendingSegments);
        return true;
    }
    Segments<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.NEW);
    for (CubeSegment newSegment : newSegments) {
        String jobId = newSegment.getLastBuildJobID();
        if (jobId == null) {
            continue; // segment has no recorded build job; nothing to check
        }
        AbstractExecutable job = coordinator.getExecutableManager().getJob(jobId);
        // instanceof is null-safe, so a separate null check is redundant
        if (job instanceof CubingJob) {
            CubingJob cubingJob = (CubingJob) job;
            if (CubingJob.CubingJobTypeEnum.OPTIMIZE.toString().equals(cubingJob.getJobType())) {
                logger.info("The cube {} is in optimization. It's not allowed to build new segments during optimization.", cube.getName());
                return true;
            }
        }
    }
    return false;
}
 
Example 3
Source File: KylinHealthCheckJob.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Reports every segment whose HDFS job working directory no longer exists,
 * logging a suggested REFRESH rebuild command for each missing one.
 *
 * @param cubes the cube instances whose segments should be verified
 * @throws IOException if the existence check against HDFS fails
 */
private void checkSegmentHDFSPath(List<CubeInstance> cubes) throws IOException {
    reporter.log("## Fix missing HDFS path of segments");
    FileSystem defaultFs = HadoopUtil.getWorkingFileSystem();
    for (CubeInstance cube : cubes) {
        for (CubeSegment segment : cube.getSegments()) {
            String jobUuid = segment.getLastBuildJobID();
            // skip segments that never recorded a build job
            if (jobUuid != null && !jobUuid.isEmpty()) {
                String path = JobBuilderSupport.getJobWorkingDir(config.getHdfsWorkingDirectory(), jobUuid);
                if (!defaultFs.exists(new Path(path))) {
                    reporter.log(
                            "Project: {} cube: {} segment: {} cube id data: {} don't exist and need to rebuild it",
                            cube.getProject(), cube.getName(), segment, path);
                    reporter.log(
                            "The rebuild url: -d '{\"startTime\":{}, \"endTime\":{}, \"buildType\":\"REFRESH\"}' /kylin/api/cubes/{}/build",
                            segment.getTSRange().start, segment.getTSRange().end, cube.getName());
                }
            }
        }
    }
}
 
Example 4
Source File: Coordinator.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the given cube is currently undergoing optimization, in which
 * case building new segments is not allowed.
 *
 * @param cube the cube instance to inspect
 * @return {@code true} if the cube has READY_PENDING segments, or if any NEW
 *         segment's last build job is an OPTIMIZE cubing job; {@code false} otherwise
 */
private boolean isInOptimize(CubeInstance cube) {
    Segments<CubeSegment> readyPendingSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
    if (!readyPendingSegments.isEmpty()) {
        logger.info("The cube {} has READY_PENDING segments {}. It's not allowed for building", cube.getName(),
                readyPendingSegments);
        return true;
    }
    Segments<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.NEW);
    for (CubeSegment newSegment : newSegments) {
        String jobId = newSegment.getLastBuildJobID();
        if (jobId == null) {
            continue; // segment has no recorded build job; nothing to check
        }
        AbstractExecutable job = getExecutableManager().getJob(jobId);
        // instanceof is null-safe, so a separate null check is redundant
        if (job instanceof CubingJob) {
            CubingJob cubingJob = (CubingJob) job;
            if (CubingJob.CubingJobTypeEnum.OPTIMIZE.toString().equals(cubingJob.getJobType())) {
                logger.info(
                        "The cube {} is in optimization. It's not allowed to build new segments during optimization.",
                        cube.getName());
                return true;
            }
        }
    }
    return false;
}
 
Example 5
Source File: BuildJobSubmitter.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the given cube is currently undergoing optimization, in which
 * case building new segments is not allowed.
 *
 * @param cube the cube instance to inspect
 * @return {@code true} if the cube has READY_PENDING segments, or if any NEW
 *         segment's last build job is an OPTIMIZE cubing job; {@code false} otherwise
 */
private boolean isInOptimize(CubeInstance cube) {
    Segments<CubeSegment> readyPendingSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
    if (!readyPendingSegments.isEmpty()) {
        logger.info("The cube {} has READY_PENDING segments {}. It's not allowed for building",
            cube.getName(), readyPendingSegments);
        return true;
    }
    Segments<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.NEW);
    for (CubeSegment newSegment : newSegments) {
        String jobId = newSegment.getLastBuildJobID();
        if (jobId == null) {
            continue; // segment has no recorded build job; nothing to check
        }
        AbstractExecutable job = coordinator.getExecutableManager().getJob(jobId);
        // instanceof is null-safe, so a separate null check is redundant
        if (job instanceof CubingJob) {
            CubingJob cubingJob = (CubingJob) job;
            if (CubingJob.CubingJobTypeEnum.OPTIMIZE.toString().equals(cubingJob.getJobType())) {
                logger.info("The cube {} is in optimization. It's not allowed to build new segments during optimization.", cube.getName());
                return true;
            }
        }
    }
    return false;
}
 
Example 6
Source File: KylinHealthCheckJob.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Reports every segment whose HDFS job working directory no longer exists,
 * logging a suggested REFRESH rebuild command for each missing one.
 *
 * @param cubes the cube instances whose segments should be verified
 * @throws IOException if the existence check against HDFS fails
 */
private void checkSegmentHDFSPath(List<CubeInstance> cubes) throws IOException {
    reporter.log("## Fix missing HDFS path of segments");
    FileSystem defaultFs = HadoopUtil.getWorkingFileSystem();
    for (CubeInstance cube : cubes) {
        for (CubeSegment segment : cube.getSegments()) {
            String jobUuid = segment.getLastBuildJobID();
            // skip segments that never recorded a build job
            if (jobUuid != null && !jobUuid.isEmpty()) {
                String path = JobBuilderSupport.getJobWorkingDir(config.getHdfsWorkingDirectory(), jobUuid);
                if (!defaultFs.exists(new Path(path))) {
                    reporter.log(
                            "Project: {} cube: {} segment: {} cube id data: {} don't exist and need to rebuild it",
                            cube.getProject(), cube.getName(), segment, path);
                    reporter.log(
                            "The rebuild url: -d '{\"startTime\":{}, \"endTime\":{}, \"buildType\":\"REFRESH\"}' /kylin/api/cubes/{}/build",
                            segment.getTSRange().start, segment.getTSRange().end, cube.getName());
                }
            }
        }
    }
}
 
Example 7
Source File: CubeMigrationCLI.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
/**
 * Queues a RENAME_FOLDER_IN_HDFS operation for each segment of the cube,
 * mapping the segment's job working directory from the source HDFS working
 * directory to the destination HDFS working directory.
 *
 * @param cube the cube whose segment folders should be scheduled for renaming
 * @throws IOException declared for subclass overrides that touch HDFS
 */
protected void renameFoldersInHdfs(CubeInstance cube) throws IOException {
    for (CubeSegment seg : cube.getSegments()) {
        // NOTE(review): getLastBuildJobID() may be null for never-built segments — confirm upstream guarantees
        String lastJobId = seg.getLastBuildJobID();
        String srcDir = JobBuilderSupport.getJobWorkingDir(srcConfig.getHdfsWorkingDirectory(), lastJobId);
        String dstDir = JobBuilderSupport.getJobWorkingDir(dstConfig.getHdfsWorkingDirectory(), lastJobId);
        operations.add(new Opt(OptType.RENAME_FOLDER_IN_HDFS, new Object[] { srcDir, dstDir }));
    }
}
 
Example 8
Source File: CubeMigrationCLI.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
/**
 * Queues a RENAME_FOLDER_IN_HDFS operation for each segment of the cube,
 * mapping the segment's job working directory from the source HDFS working
 * directory to the destination HDFS working directory.
 *
 * @param cube the cube whose segment folders should be scheduled for renaming
 */
private static void renameFoldersInHdfs(CubeInstance cube) {
    for (CubeSegment seg : cube.getSegments()) {
        // NOTE(review): getLastBuildJobID() may be null for never-built segments — confirm upstream guarantees
        String lastJobId = seg.getLastBuildJobID();
        String srcDir = JobBuilderSupport.getJobWorkingDir(srcConfig.getHdfsWorkingDirectory(), lastJobId);
        String dstDir = JobBuilderSupport.getJobWorkingDir(dstConfig.getHdfsWorkingDirectory(), lastJobId);
        operations.add(new Opt(OptType.RENAME_FOLDER_IN_HDFS, new Object[] { srcDir, dstDir }));
    }
}
 
Example 9
Source File: CubeMigrationCLI.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Queues a RENAME_FOLDER_IN_HDFS operation for each segment of the cube,
 * mapping the segment's job working directory from the source HDFS working
 * directory to the destination HDFS working directory.
 *
 * @param cube the cube whose segment folders should be scheduled for renaming
 * @throws IOException declared for subclass overrides that touch HDFS
 */
protected void renameFoldersInHdfs(CubeInstance cube) throws IOException {
    for (CubeSegment seg : cube.getSegments()) {
        // NOTE(review): getLastBuildJobID() may be null for never-built segments — confirm upstream guarantees
        String lastJobId = seg.getLastBuildJobID();
        String srcDir = JobBuilderSupport.getJobWorkingDir(srcConfig.getHdfsWorkingDirectory(), lastJobId);
        String dstDir = JobBuilderSupport.getJobWorkingDir(dstConfig.getHdfsWorkingDirectory(), lastJobId);
        operations.add(new Opt(OptType.RENAME_FOLDER_IN_HDFS, new Object[] { srcDir, dstDir }));
    }
}
 
Example 10
Source File: CubeMigrationCLI.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Queues a RENAME_FOLDER_IN_HDFS operation for each segment of the cube,
 * mapping the segment's job working directory from the source HDFS working
 * directory to the destination HDFS working directory.
 *
 * @param cube the cube whose segment folders should be scheduled for renaming
 */
private static void renameFoldersInHdfs(CubeInstance cube) {
    for (CubeSegment seg : cube.getSegments()) {
        // NOTE(review): getLastBuildJobID() may be null for never-built segments — confirm upstream guarantees
        String lastJobId = seg.getLastBuildJobID();
        String srcDir = JobBuilderSupport.getJobWorkingDir(srcConfig.getHdfsWorkingDirectory(), lastJobId);
        String dstDir = JobBuilderSupport.getJobWorkingDir(dstConfig.getHdfsWorkingDirectory(), lastJobId);
        operations.add(new Opt(OptType.RENAME_FOLDER_IN_HDFS, new Object[] { srcDir, dstDir }));
    }
}
 
Example 11
Source File: MergeCuboidMapper.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Finds the segment of the given cube whose last build job ID matches the
 * supplied job ID, ignoring case.
 *
 * @param jobID        the build job ID to look for
 * @param cubeInstance the cube whose segments are searched
 * @return the matching segment
 * @throws IllegalStateException if no segment's last build job ID matches
 */
private CubeSegment findSegmentWithUuid(String jobID, CubeInstance cubeInstance) {
    for (CubeSegment candidate : cubeInstance.getSegments()) {
        String buildJobId = candidate.getLastBuildJobID();
        // segments without a build job can never match
        if (buildJobId == null || !buildJobId.equalsIgnoreCase(jobID)) {
            continue;
        }
        return candidate;
    }
    throw new IllegalStateException("No merging segment's last build job ID equals " + jobID);
}
 
Example 12
Source File: CubeMigrationCLI.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Queues a RENAME_FOLDER_IN_HDFS operation for each segment of the cube,
 * mapping the segment's job working directory from the source HDFS working
 * directory to the destination HDFS working directory.
 *
 * @param cube the cube whose segment folders should be scheduled for renaming
 */
private static void renameFoldersInHdfs(CubeInstance cube) {
    for (CubeSegment seg : cube.getSegments()) {
        // NOTE(review): getLastBuildJobID() may be null for never-built segments — confirm upstream guarantees
        String lastJobId = seg.getLastBuildJobID();
        String srcDir = JobInstance.getJobWorkingDir(lastJobId, srcConfig.getHdfsWorkingDirectory());
        String dstDir = JobInstance.getJobWorkingDir(lastJobId, dstConfig.getHdfsWorkingDirectory());
        operations.add(new Opt(OptType.RENAME_FOLDER_IN_HDFS, new Object[] { srcDir, dstDir }));
    }
}
 
Example 13
Source File: CubeMetaExtractor.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Registers the resource paths of a cube's READY segments — and optionally
 * their dictionaries, snapshots, and build-job records — as required items
 * for the metadata extract. When segments are excluded entirely, the cube is
 * marked DISABLED and queued for trimming instead.
 *
 * @param cube the cube whose segment and job resources should be collected
 * @throws RuntimeException if a READY segment has no build job, or if the
 *                          job store cannot be read (wrapping PersistentException)
 */
private void addSegAndJob(CubeInstance cube) {
    if (includeSegments) {
        addRequired(CubeInstance.concatResourcePath(cube.getName()));
        // only READY segments carry extractable build artifacts
        for (CubeSegment segment : cube.getSegments(SegmentStatusEnum.READY)) {
            addRequired(CubeSegment.getStatisticsResourcePath(cube.getName(), segment.getUuid()));
            if (includeSegmentDetails) {
                for (String dictPat : segment.getDictionaryPaths()) {
                    addRequired(dictPat);
                }
                for (String snapshotPath : segment.getSnapshotPaths()) {
                    addRequired(snapshotPath);
                }
            }

            if (includeJobs) {
                String lastJobId = segment.getLastBuildJobID();
                if (StringUtils.isEmpty(lastJobId)) {
                    // a READY segment without a job record is inconsistent metadata
                    throw new RuntimeException("No job exist for segment :" + segment);
                } else {
                    try {
                        if (onlyJobOutput) {
                            // just the job's output record, not its definition or subtasks
                            addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + lastJobId);
                        } else {
                            // full job: definition + output, plus each subtask's definition + output
                            ExecutablePO executablePO = executableDao.getJob(lastJobId);
                            addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + lastJobId);
                            addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + lastJobId);
                            for (ExecutablePO task : executablePO.getTasks()) {
                                addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + task.getUuid());
                                addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + task.getUuid());
                            }
                        }
                    } catch (PersistentException e) {
                        throw new RuntimeException("PersistentException", e);
                    }
                }
            }
        }
    } else {
        if (includeJobs) {
            logger.warn("It's useless to set includeJobs to true when includeSegments is set to false");
        }
        // without segments the extracted cube cannot serve queries; disable it
        cube.setStatus(RealizationStatusEnum.DISABLED);
        cubesToTrimAndSave.add(cube);
    }
}
 
Example 14
Source File: CubeMetaExtractor.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Registers the resource paths of a cube's READY segments — and optionally
 * their dictionaries, snapshots, and build-job records — as required items
 * for the metadata extract. When segments are excluded entirely, the cube is
 * marked DISABLED and queued for trimming instead.
 *
 * @param cube the cube whose segment and job resources should be collected
 * @throws RuntimeException if a READY segment has no build job, or if the
 *                          job store cannot be read (wrapping PersistentException)
 */
private void addSegAndJob(CubeInstance cube) {
    if (includeSegments) {
        addRequired(CubeInstance.concatResourcePath(cube.getName()));
        // only READY segments carry extractable build artifacts
        for (CubeSegment segment : cube.getSegments(SegmentStatusEnum.READY)) {
            addRequired(CubeSegment.getStatisticsResourcePath(cube.getName(), segment.getUuid()));
            if (includeSegmentDetails) {
                for (String dictPat : segment.getDictionaryPaths()) {
                    addRequired(dictPat);
                }
                for (String snapshotPath : segment.getSnapshotPaths()) {
                    addRequired(snapshotPath);
                }
            }

            if (includeJobs) {
                String lastJobId = segment.getLastBuildJobID();
                if (StringUtils.isEmpty(lastJobId)) {
                    // a READY segment without a job record is inconsistent metadata
                    throw new RuntimeException("No job exist for segment :" + segment);
                } else {
                    try {
                        if (onlyJobOutput) {
                            // just the job's output record, not its definition or subtasks
                            addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + lastJobId);
                        } else {
                            // full job: definition + output, plus each subtask's definition + output
                            ExecutablePO executablePO = executableDao.getJob(lastJobId);
                            addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + lastJobId);
                            addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + lastJobId);
                            for (ExecutablePO task : executablePO.getTasks()) {
                                addRequired(ResourceStore.EXECUTE_RESOURCE_ROOT + "/" + task.getUuid());
                                addRequired(ResourceStore.EXECUTE_OUTPUT_RESOURCE_ROOT + "/" + task.getUuid());
                            }
                        }
                    } catch (PersistentException e) {
                        throw new RuntimeException("PersistentException", e);
                    }
                }
            }
        }
    } else {
        if (includeJobs) {
            logger.warn("It's useless to set includeJobs to true when includeSegments is set to false");
        }
        // without segments the extracted cube cannot serve queries; disable it
        cube.setStatus(RealizationStatusEnum.DISABLED);
        cubesToTrimAndSave.add(cube);
    }
}