org.apache.kylin.job.exception.ExecuteException Java Examples

The following examples show how to use org.apache.kylin.job.exception.ExecuteException. They are taken from the Apache Kylin project and its kylin-on-parquet-v2 variant; the source file and license are noted above each example.
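Before the examples, here is a minimal sketch of the pattern they all share: a job step overrides doWork(ExecutableContext), reports normal success or failure through an ExecuteResult, and reserves ExecuteException for failures the step cannot express as a result. The base class AbstractExecutable and the import paths follow the Kylin job engine; cleanTempFiles() is a hypothetical helper standing in for a step's real work.

import java.io.IOException;

import org.apache.kylin.job.exception.ExecuteException;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableContext;
import org.apache.kylin.job.execution.ExecuteResult;

// A minimal sketch, not production code: shows where ExecuteException fits
// into the doWork() contract used by all of the examples below.
public class ExampleCleanupStep extends AbstractExecutable {

    @Override
    protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
        try {
            cleanTempFiles();
            // normal completion (and normal failure) is reported via ExecuteResult
            return ExecuteResult.createSucceed();
        } catch (IOException e) {
            // errors the scheduler itself must handle are wrapped in ExecuteException
            throw new ExecuteException("failed to clean temp files", e);
        }
    }

    // hypothetical helper; a real step would touch HDFS, Hive, metadata, etc.
    private void cleanTempFiles() throws IOException {
    }
}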
Example #1
Source File: CreateFlatHiveTableStep.java    From kylin and kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    stepLogger.setILogListener((infoKey, info) -> {
        // only the YARN app id and URL are of interest here
        if (ExecutableConstants.YARN_APP_ID.equals(infoKey)
                || ExecutableConstants.YARN_APP_URL.equals(infoKey)) {
            getManager().addJobInfo(getId(), info);
        }
    });
    KylinConfig config = getCubeSpecificConfig();
    try {
        createFlatHiveTable(config);
        return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());

    } catch (Exception e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog(), e);
    }
}
 
Example #2
Source File: HDFSPathGarbageCollectionStep.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        config = new JobEngineConfig(context.getConfig());
        List<String> toDeletePaths = getDeletePaths();
        dropHdfsPathOnCluster(toDeletePaths, HadoopUtil.getWorkingFileSystem());

        if (StringUtils.isNotEmpty(context.getConfig().getHBaseClusterFs())) {
            dropHdfsPathOnCluster(toDeletePaths, FileSystem.get(HBaseConnection.getCurrentHBaseConfiguration()));
        }
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        output.append("\n").append(e.getLocalizedMessage());
    }

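    // note that the step reports SUCCEED even when a delete fails; the error is only recorded in the output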
    return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
 
Example #3
Source File: CreateFlatHiveTableByLivyStep.java    From kylin and kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    stepLogger.setILogListener((infoKey, info) -> {
        // only the YARN app id and URL are of interest here
        if (ExecutableConstants.YARN_APP_ID.equals(infoKey)
                || ExecutableConstants.YARN_APP_URL.equals(infoKey)) {
            getManager().addJobInfo(getId(), info);
        }
    });
    KylinConfig config = getCubeSpecificConfig();
    try {
        createFlatHiveTable(config);
        return new ExecuteResult(ExecuteResult.State.SUCCEED, stepLogger.getBufferedLog());

    } catch (Exception e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, stepLogger.getBufferedLog(), e);
    }
}
 
Example #4
Source File: DefaultChainedExecutable.java    From kylin and kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    List<? extends Executable> executables = getTasks();
    final int size = executables.size();
    for (int i = 0; i < size; ++i) {
        Executable subTask = executables.get(i);
        ExecutableState state = subTask.getStatus();
        if (state == ExecutableState.RUNNING) {
            // there is already a running subtask, no need to start a new one
            break;
        } else if (state == ExecutableState.STOPPED) {
            // the job is paused
            break;
        } else if (state == ExecutableState.ERROR) {
            throw new IllegalStateException(
                    "invalid subtask state, subtask:" + subTask.getName() + ", state:" + subTask.getStatus());
        }
        if (subTask.isRunnable()) {
            return subTask.execute(context);
        }
    }
    return new ExecuteResult(ExecuteResult.State.SUCCEED);
}
 
Example #5
Source File: UpdateCubeInfoAfterCheckpointStep.java    From kylin and kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));

    Set<Long> recommendCuboids = cube.getCuboidsRecommend();
    try {
        List<CubeSegment> newSegments = cube.getSegments(SegmentStatusEnum.READY_PENDING);
        Map<Long, Long> recommendCuboidsWithStats = CuboidStatsReaderUtil
                .readCuboidStatsFromSegments(recommendCuboids, newSegments);
        if (recommendCuboidsWithStats == null) {
            throw new RuntimeException("Fail to get statistics info for recommended cuboids after optimization!!!");
        }
        cubeManager.promoteCheckpointOptimizeSegments(cube, recommendCuboidsWithStats,
                newSegments.toArray(new CubeSegment[newSegments.size()]));
        return new ExecuteResult();
    } catch (Exception e) {
        logger.error("fail to update cube after build", e);
        return ExecuteResult.createError(e);
    }
}
 
Example #6
Source File: NSparkExecutable.java    From kylin-on-parquet-v2 with Apache License 2.0
String dumpArgs() throws ExecuteException {
    // the job params are serialized to a temp file whose path is handed to the Spark job
    File tmpFile = null;
    try {
        tmpFile = File.createTempFile(MetadataConstants.P_SEGMENT_IDS, "");
        FileUtils.writeByteArrayToFile(tmpFile, JsonUtil.writeValueAsBytes(getParams()));

        logger.info("Spark job args json is : {}.", JsonUtil.writeValueAsString(getParams()));
        return tmpFile.getCanonicalPath();
    } catch (IOException e) {
        if (tmpFile != null && tmpFile.exists()) {
            try {
                Files.delete(tmpFile.toPath());
            } catch (IOException e1) {
                throw new ExecuteException(
                        "Write cuboidLayoutIds failed: Error for delete file " + tmpFile.getPath(), e1);
            }
        }
        throw new ExecuteException("Write cuboidLayoutIds failed: ", e);
    }
}
 
Example #7
Source File: NSparkUpdateMetaAndCleanupAfterMergeStep.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    String cubeId = getParam(MetadataConstants.P_CUBE_ID);
    String[] segments = StringUtils.split(getParam(MetadataConstants.P_SEGMENT_NAMES), ",");
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    CubeInstance cube = CubeManager.getInstance(config).getCubeByUuid(cubeId);

    updateMetadataAfterMerge(cubeId);

    for (String segmentName : segments) {
        String path = config.getHdfsWorkingDirectory() + cube.getProject() + "/parquet/" + cube.getName() + "/" + segmentName;
        try {
            HadoopUtil.deletePath(HadoopUtil.getCurrentConfiguration(), new Path(path));
        } catch (IOException e) {
            // keep the root cause so the job log shows why the delete failed
            throw new ExecuteException("Can not delete segment: " + segmentName + ", in cube: " + cube.getName(), e);
        }
    }

    return ExecuteResult.createSucceed();
}
 
Example #8
Source File: SelfStopExecutable.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    doingWork = true;
    try {
        for (int i = 0; i < 60; i++) {
            sleepOneSecond();
            
            if (isDiscarded())
                return new ExecuteResult(ExecuteResult.State.STOPPED, "stopped");
        }
            
        return new ExecuteResult();
    } finally {
        doingWork = false;
    }
}
 
Example #9
Source File: CopyDictionaryStep.java    From kylin and kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager mgr = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
    final CubeSegment optimizeSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));

    CubeSegment oldSegment = optimizeSegment.getCubeInstance().getOriginalSegmentToOptimize(optimizeSegment);
    Preconditions.checkNotNull(oldSegment,
            "cannot find the original segment to be optimized by " + optimizeSegment);

    // --- Copy dictionary
    optimizeSegment.getDictionaries().putAll(oldSegment.getDictionaries());
    optimizeSegment.getSnapshots().putAll(oldSegment.getSnapshots());
    optimizeSegment.getRowkeyStats().addAll(oldSegment.getRowkeyStats());

    try {
        CubeUpdate cubeBuilder = new CubeUpdate(cube);
        cubeBuilder.setToUpdateSegs(optimizeSegment);
        mgr.updateCube(cubeBuilder);
    } catch (IOException e) {
        logger.error("fail to merge dictionary or lookup snapshots", e);
        return ExecuteResult.createError(e);
    }

    return new ExecuteResult();
}
 
Example #10
Source File: UpdateCubeInfoAfterBuildStep.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()))
            .latestCopyForWrite();
    final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));

    CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
    long sourceCount = cubingJob.findSourceRecordCount();
    long sourceSizeBytes = cubingJob.findSourceSizeBytes();
    long cubeSizeBytes = cubingJob.findCubeSizeBytes();

    KylinConfig config = KylinConfig.getInstanceFromEnv();
    List<Double> cuboidEstimateRatio = cubingJob.findEstimateRatio(segment, config);

    segment.setLastBuildJobID(CubingExecutableUtil.getCubingJobId(this.getParams()));
    segment.setLastBuildTime(System.currentTimeMillis());
    segment.setSizeKB(cubeSizeBytes / 1024);
    segment.setInputRecords(sourceCount);
    segment.setInputRecordsSize(sourceSizeBytes);
    segment.setEstimateRatio(cuboidEstimateRatio);

    try {
        deleteDictionaryIfNeeded(segment);
        saveExtSnapshotIfNeeded(cubeManager, cube, segment);
        updateSegment(segment);

        cubeManager.promoteNewlyBuiltSegments(cube, segment);
        return new ExecuteResult();
    } catch (IOException e) {
        logger.error("fail to update cube after build", e);
        return ExecuteResult.createError(e);
    }
}
 
Example #11
Source File: GarbageCollectionStep.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    KylinConfig config = context.getConfig();
    StringBuffer output = new StringBuffer();
    try {
        output.append(cleanUpIntermediateFlatTable(config));
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return ExecuteResult.createError(e);
    }

    return new ExecuteResult(ExecuteResult.State.SUCCEED, output.toString());
}
 
Example #12
Source File: SparkExecutableLivy.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult onResumed(String appId, ExecutableManager mgr) throws ExecuteException {
    Map<String, String> info = new HashMap<>();
    try {
        logger.info("livy spark_job_id:" + appId + " resumed");
        info.put(ExecutableConstants.SPARK_JOB_ID, appId);

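        // poll the Livy application state every five seconds until the job succeeds, fails, or is paused/discarded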
        while (!isPaused() && !isDiscarded()) {
            String status = getAppState(appId);

            if (Strings.isNullOrEmpty(status) || LivyStateEnum.dead.name().equalsIgnoreCase(status)
                    || LivyStateEnum.error.name().equalsIgnoreCase(status)
                    || LivyStateEnum.shutting_down.name().equalsIgnoreCase(status)) {
                mgr.updateJobOutput(getId(), ExecutableState.ERROR, null, appId + " has failed");
                return new ExecuteResult(ExecuteResult.State.FAILED, appId + " has failed");
            }

            if (LivyStateEnum.success.name().equalsIgnoreCase(status)) {
                mgr.addJobInfo(getId(), info);
                return new ExecuteResult(ExecuteResult.State.SUCCEED, appId + " has finished");
            }

            Thread.sleep(5000);
        }

        killAppRetry(appId);

        if (isDiscarded()) {
            return new ExecuteResult(ExecuteResult.State.DISCARDED, appId + " is discarded");
        } else {
            return new ExecuteResult(ExecuteResult.State.STOPPED, appId + " is stopped");
        }

    } catch (Exception e) {
        logger.error("error run spark job:", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
 
Example #13
Source File: MergeDictionaryStep.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager mgr = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = mgr.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
    final CubeSegment newSegment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));
    final List<CubeSegment> mergingSegments = getMergingSegments(cube);
    KylinConfig conf = cube.getConfig();

    Collections.sort(mergingSegments);

    try {
        checkLookupSnapshotsMustIncremental(mergingSegments);

        // work on copy instead of cached objects
        CubeInstance cubeCopy = cube.latestCopyForWrite();
        CubeSegment newSegCopy = cubeCopy.getSegmentById(newSegment.getUuid());
        
        makeDictForNewSegment(conf, cubeCopy, newSegCopy, mergingSegments);
        makeSnapshotForNewSegment(cubeCopy, newSegCopy, mergingSegments);

        CubeUpdate update = new CubeUpdate(cubeCopy);
        update.setToUpdateSegs(newSegCopy);
        mgr.updateCube(update);
        return ExecuteResult.createSucceed();
    } catch (IOException e) {
        logger.error("fail to merge dictionary or lookup snapshots", e);
        return ExecuteResult.createError(e);
    }
}
 
Example #14
Source File: LookupSnapshotToMetaStoreStep.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    KylinConfig kylinConfig = context.getConfig();
    CubeManager cubeManager = CubeManager.getInstance(kylinConfig);
    TableMetadataManager metaMgr = TableMetadataManager.getInstance(kylinConfig);
    SnapshotManager snapshotMgr = SnapshotManager.getInstance(kylinConfig);
    CubeInstance cube = cubeManager.getCube(LookupExecutableUtil.getCubeName(this.getParams()));
    List<String> segmentIDs = LookupExecutableUtil.getSegments(this.getParams());
    String lookupTableName = LookupExecutableUtil.getLookupTableName(this.getParams());
    CubeDesc cubeDesc = cube.getDescriptor();
    try {
        TableDesc tableDesc = metaMgr.getTableDesc(lookupTableName, cube.getProject());
        IReadableTable hiveTable = SourceManager.createReadableTable(tableDesc, null);
        logger.info("take snapshot for table:" + lookupTableName);
        SnapshotTable snapshot = snapshotMgr.buildSnapshot(hiveTable, tableDesc, cube.getConfig());

        logger.info("update snapshot path to cube metadata");
        if (cubeDesc.isGlobalSnapshotTable(lookupTableName)) {
            LookupExecutableUtil.updateSnapshotPathToCube(cubeManager, cube, lookupTableName,
                    snapshot.getResourcePath());
        } else {
            LookupExecutableUtil.updateSnapshotPathToSegments(cubeManager, cube, segmentIDs, lookupTableName,
                    snapshot.getResourcePath());
        }
        return new ExecuteResult();
    } catch (IOException e) {
        logger.error("fail to build snapshot for:" + lookupTableName, e);
        return ExecuteResult.createError(e);
    }
}
 
Example #15
Source File: FiveSecondSucceedTestExecutable.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        Thread.sleep(5000);
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
    }
    return ExecuteResult.createSucceed();
}
 
Example #16
Source File: UpdateCubeInfoAfterOptimizeStep.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cube = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams()));
    final CubeSegment segment = cube.getSegmentById(CubingExecutableUtil.getSegmentId(this.getParams()));

    CubeSegment originalSegment = cube.getOriginalSegmentToOptimize(segment);
    long sourceCount = originalSegment.getInputRecords();
    long sourceSizeBytes = originalSegment.getInputRecordsSize();

    CubingJob cubingJob = (CubingJob) getManager().getJob(CubingExecutableUtil.getCubingJobId(this.getParams()));
    long cubeSizeBytes = cubingJob.findCubeSizeBytes();

    segment.setLastBuildJobID(CubingExecutableUtil.getCubingJobId(this.getParams()));
    segment.setLastBuildTime(System.currentTimeMillis());
    segment.setSizeKB(cubeSizeBytes / 1024);
    segment.setInputRecords(sourceCount);
    segment.setInputRecordsSize(sourceSizeBytes);
    segment.setDimensionRangeInfoMap(originalSegment.getDimensionRangeInfoMap());

    try {
        cubeManager.promoteNewlyOptimizeSegments(cube, segment);
        return new ExecuteResult();
    } catch (IOException e) {
        logger.error("fail to update cube after build", e);
        return ExecuteResult.createError(e);
    }
}
 
Example #17
Source File: FailedTestExecutable.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        // restore the interrupt flag rather than swallowing the interruption
        Thread.currentThread().interrupt();
    }
    return new ExecuteResult(ExecuteResult.State.FAILED, "failed");
}
 
Example #18
Source File: ShellExecutable.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        logger.info("executing:" + getCmd());
        final PatternedLogger patternedLogger = new PatternedLogger(logger);
        final Pair<Integer, String> result = context.getConfig().getCliCommandExecutor().execute(getCmd(), patternedLogger);
        getManager().addJobInfo(getId(), patternedLogger.getInfo());
        return result.getFirst() == 0 ? new ExecuteResult(ExecuteResult.State.SUCCEED, result.getSecond())
                : ExecuteResult.createFailed(new ShellException(result.getSecond()));
    } catch (IOException e) {
        logger.error("job:" + getId() + " execute finished with exception", e);
        return ExecuteResult.createError(e);
    }
}
 
Example #19
Source File: FlinkExecutable.java    From kylin with Apache License 2.0
private ExecuteResult onResumed(String appId, ExecutableManager mgr) throws ExecuteException {
    Map<String, String> info = new HashMap<>();
    try {
        logger.info("flink_job_id:" + appId + " resumed");
        info.put(ExecutableConstants.FLINK_JOB_ID, appId);

        while (!isPaused() && !isDiscarded()) {
            String status = getAppState(appId);

            if (status.equals("FAILED") || status.equals("KILLED")) {
                mgr.updateJobOutput(getId(), ExecutableState.ERROR, null, appId + " has failed");
                return new ExecuteResult(ExecuteResult.State.FAILED, appId + " has failed");
            }

            if (status.equals("SUCCEEDED")) {
                mgr.addJobInfo(getId(), info);
                return new ExecuteResult(ExecuteResult.State.SUCCEED, appId + " has finished");
            }

            Thread.sleep(5000);

        }

        killAppRetry(appId);

        if (isDiscarded()) {
            return new ExecuteResult(ExecuteResult.State.DISCARDED, appId + " is discarded");
        } else {
            return new ExecuteResult(ExecuteResult.State.STOPPED, appId + " is stopped");
        }
    } catch (Exception e) {
        logger.error("error run spark job:", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
 
Example #20
Source File: SparkExecutable.java    From kylin-on-parquet-v2 with Apache License 2.0
protected void dumpMetadata(CubeSegment segment, List<CubeSegment> mergingSeg) throws ExecuteException {
    try {
        if (mergingSeg == null || mergingSeg.size() == 0) {
            attachSegmentMetadataWithDict(segment);
        } else {
            List<CubeSegment> allRelatedSegs = new ArrayList<>();
            allRelatedSegs.add(segment);
            allRelatedSegs.addAll(mergingSeg);
            attachSegmentsMetadataWithDict(allRelatedSegs);
        }
    } catch (IOException e) {
        // include the IOException as the cause so it is not lost
        throw new ExecuteException("meta dump failed", e);
    }
}
 
Example #21
Source File: SparkExecutable.java    From kylin-on-parquet-v2 with Apache License 2.0
protected ExecuteResult onResumed(String appId, ExecutableManager mgr) throws ExecuteException {
    Map<String, String> info = new HashMap<>();
    try {
        logger.info("spark_job_id:" + appId + " resumed");
        info.put(ExecutableConstants.SPARK_JOB_ID, appId);

        while (!isPaused() && !isDiscarded()) {
            String status = getAppState(appId);

            if (status.equals("FAILED") || status.equals("KILLED")) {
                mgr.updateJobOutput(getId(), ExecutableState.ERROR, null, appId + " has failed");
                return new ExecuteResult(ExecuteResult.State.FAILED, appId + " has failed");
            }

            if (status.equals("SUCCEEDED")) {
                mgr.addJobInfo(getId(), info);
                return new ExecuteResult(ExecuteResult.State.SUCCEED, appId + " has finished");
            }

            Thread.sleep(5000);
        }

        killAppRetry(appId);

        if (isDiscarded()) {
            return new ExecuteResult(ExecuteResult.State.DISCARDED, appId + " is discarded");
        } else {
            return new ExecuteResult(ExecuteResult.State.STOPPED, appId + " is stopped");
        }

    } catch (Exception e) {
        logger.error("error run spark job:", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }
}
 
Example #22
Source File: MergeOffsetStep.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    final CubeManager cubeManager = CubeManager.getInstance(context.getConfig());
    final CubeInstance cubeCopy = cubeManager.getCube(CubingExecutableUtil.getCubeName(this.getParams())).latestCopyForWrite();
    final String segmentId = CubingExecutableUtil.getSegmentId(this.getParams());
    final CubeSegment segCopy = cubeCopy.getSegmentById(segmentId);

    Preconditions.checkNotNull(segCopy, "Cube segment '" + segmentId + "' not found.");
    Segments<CubeSegment> mergingSegs = cubeCopy.getMergingSegments(segCopy);

    Preconditions.checkArgument(mergingSegs.size() > 0, "Merging segment not exist.");

    Collections.sort(mergingSegs);
    final CubeSegment first = mergingSegs.get(0);
    final CubeSegment last = mergingSegs.get(mergingSegs.size() - 1);

    segCopy.setSegRange(new SegmentRange(first.getSegRange().start, last.getSegRange().end));
    segCopy.setSourcePartitionOffsetStart(first.getSourcePartitionOffsetStart());
    segCopy.setSourcePartitionOffsetEnd(last.getSourcePartitionOffsetEnd());

    segCopy.setTSRange(new TSRange(mergingSegs.getTSStart(), mergingSegs.getTSEnd()));

    CubeUpdate update = new CubeUpdate(cubeCopy);
    update.setToUpdateSegs(segCopy);
    try {
        cubeManager.updateCube(update);
        return ExecuteResult.createSucceed();
    } catch (IOException e) {
        logger.error("fail to update cube segment offset", e);
        return ExecuteResult.createError(e);
    }
}
 
Example #23
Source File: UpdateSnapshotCacheForQueryServersStepTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testExecute() throws ExecuteException {
    UpdateSnapshotCacheForQueryServersStep step = new UpdateSnapshotCacheForQueryServersStep();
    ExecuteResult result = step.doWork(new DefaultContext(Maps.<String, Executable>newConcurrentMap(), kylinConfig));
    System.out.println(result.output());
    assertTrue(result.succeed());
}
 
Example #24
Source File: ErrorTestExecutable.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        Thread.sleep(100);
    } catch (InterruptedException e) {
        // restore the interrupt flag rather than swallowing the interruption
        Thread.currentThread().interrupt();
    }
    throw new RuntimeException("test error");
}
 
Example #25
Source File: RunningTestExecutable.java    From kylin with Apache License 2.0
@Override
protected ExecuteResult doWork(ExecutableContext context) throws ExecuteException {
    try {
        Thread.sleep(1000);
    } catch (InterruptedException e) {
        // restore the interrupt flag rather than swallowing the interruption
        Thread.currentThread().interrupt();
    }
    return ExecuteResult.createSucceed();
}