org.apache.kylin.job.execution.ExecutableState Java Examples

The following examples show how to use org.apache.kylin.job.execution.ExecutableState. Each snippet is taken from an open source project; the source file, project, and license are noted above each example.
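
Before diving into the examples, here is a minimal sketch (not taken from any of the projects below) of the pattern most of them follow: fetch a job's Output through ExecutableManager and branch on its ExecutableState. The class name ExecutableStateSketch, the helper printJobState, and the jobId parameter are hypothetical; the package names assume a recent Apache Kylin release.

import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.job.execution.ExecutableManager;
import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.job.execution.Output;

public class ExecutableStateSketch {

    // Hypothetical helper: reads a job's current state and reports whether it is terminal.
    public static void printJobState(String jobId) {
        ExecutableManager mgr = ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv());
        Output output = mgr.getOutput(jobId);
        ExecutableState state = output.getState();
        switch (state) {
        case SUCCEED:
        case DISCARDED:
            System.out.println("Job " + jobId + " finished in terminal state " + state);
            break;
        case ERROR:
        case STOPPED:
            System.out.println("Job " + jobId + " halted in state " + state + "; it may be resumed or discarded");
            break;
        case READY:
        case RUNNING:
        default:
            System.out.println("Job " + jobId + " is still in progress: " + state);
            break;
        }
    }
}
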
Example #1
Source File: FetcherRunner.java    From kylin with Apache License 2.0
protected void jobStateCount(String id) {
    final Output outputDigest = getExecutableManager().getOutputDigest(id);
    // logger.debug("Job id:" + id + " not runnable");
    if (outputDigest.getState() == ExecutableState.SUCCEED) {
        succeedJobs.add(id);
        nSUCCEED++;
    } else if (outputDigest.getState() == ExecutableState.ERROR) {
        nError++;
    } else if (outputDigest.getState() == ExecutableState.DISCARDED) {
        nDiscarded++;
    } else if (outputDigest.getState() == ExecutableState.STOPPED) {
        nStopped++;
    } else {
        if (fetchFailed) {
            getExecutableManager().forceKillJob(id);
            nError++;
        } else {
            nOthers++;
        }
    }
}
 
Example #2
Source File: BuildJobSubmitterTest.java    From kylin with Apache License 2.0
void prepareTestCheckSegmentBuildJobFromMetadata() {
    CubeSegment cubeSegment = stubCubSegment(SegmentStatusEnum.NEW, 100L, 200L);
    CubeInstance cubeInstance = stubCubeInstance(cubeSegment);
    config = stubKylinConfig();
    when(cubeInstance.getConfig()).thenReturn(config);

    cubeManager = stubCubeManager(cubeInstance, false);

    Map<String, CubingJob> cubingJobMap = new HashMap<>();
    cubingJobMap.put(mockBuildJob1, stubCubingJob(ExecutableState.SUCCEED));
    cubingJobMap.put(mockBuildJob2, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob3, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob4, stubCubingJob(ExecutableState.ERROR));

    executableManager = stubExecutableManager(cubingJobMap);
    streamingCoordinator = stubStreamingCoordinator(config, cubeManager, executableManager);
    clusterManager = stubReceiverClusterManager(streamingCoordinator);
    when(streamingCoordinator.getClusterManager()).thenReturn(clusterManager);
}
 
Example #3
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0
@Test
public void testParseToJobInstance4CuboidJob() {
    TestJob task = new TestJob();
    String jobId = UUID.randomUUID().toString();
    String cubeName = "cube1";
    task.setId(jobId);
    task.setParam(CubingExecutableUtil.CUBE_NAME, cubeName);
    Map<String, Output> outPutMap = Maps.newHashMap();
    DefaultOutput executeOutput = new DefaultOutput();
    executeOutput.setState(ExecutableState.READY);
    Map<String, String> extraMap = Maps.newHashMap();
    executeOutput.setExtra(extraMap);
    outPutMap.put(jobId, executeOutput);

    JobInstance instance3 = JobInfoConverter.parseToJobInstanceQuietly(task, outPutMap);
    // no exception thrown is expected
    assertEquals(jobId, instance3.getId());
    assertEquals(CubeBuildTypeEnum.BUILD, instance3.getType());
    assertEquals(cubeName, instance3.getRelatedCube());
    assertEquals(JobStatusEnum.PENDING, instance3.getStatus());
}
 
Example #4
Source File: ExecutableManager.java    From Kylin with Apache License 2.0
public void updateJobOutput(String jobId, ExecutableState newStatus, Map<String, String> info, String output) {
    try {
        final ExecutableOutputPO jobOutput = executableDao.getJobOutput(jobId);
        Preconditions.checkArgument(jobOutput != null, "there is no related output for job id:" + jobId);
        ExecutableState oldStatus = ExecutableState.valueOf(jobOutput.getStatus());
        if (newStatus != null && oldStatus != newStatus) {
            if (!ExecutableState.isValidStateTransfer(oldStatus, newStatus)) {
                throw new IllegalStateTranferException("there is no valid state transfer from:" + oldStatus + " to:" + newStatus);
            }
            jobOutput.setStatus(newStatus.toString());
        }
        if (info != null) {
            jobOutput.setInfo(info);
        }
        if (output != null) {
            jobOutput.setContent(output);
        }
        executableDao.updateJobOutput(jobOutput);
        logger.info("job id:" + jobId + " from " + oldStatus + " to " + newStatus);
    } catch (PersistentException e) {
        logger.error("error change job:" + jobId + " to " + newStatus.toString());
        throw new RuntimeException(e);
    }
}
 
Example #5
Source File: JobInstanceExtractor.java    From kylin with Apache License 2.0
private JobStatusEnum parseToJobStatus(ExecutableState state) {
    switch (state) {
    case READY:
        return JobStatusEnum.PENDING;
    case RUNNING:
        return JobStatusEnum.RUNNING;
    case ERROR:
        return JobStatusEnum.ERROR;
    case DISCARDED:
        return JobStatusEnum.DISCARDED;
    case SUCCEED:
        return JobStatusEnum.FINISHED;
    case STOPPED:
    default:
        throw new RuntimeException("invalid state:" + state);
    }
}
 
Example #6
Source File: DistributedScheduler.java    From kylin-on-parquet-v2 with Apache License 2.0
private void resumeAllRunningJobs() {
    for (final String id : executableManager.getAllJobIds()) {
        final Output output = executableManager.getOutput(id);
        AbstractExecutable executable = executableManager.getJob(id);
        if (output.getState() == ExecutableState.RUNNING && executable instanceof DefaultChainedExecutable) {
            try {
                if (!jobLock.isLocked(getLockPath(executable.getId()))) {
                    executableManager.resumeRunningJobForce(executable.getId());
                    fetcherPool.schedule(fetcher, 0, TimeUnit.SECONDS);
                }
            } catch (Exception e) {
                logger.error("resume the job " + id + " fail in server: " + serverName, e);
            }
        }
    }
}
 
Example #7
Source File: JobInfoConverter.java    From kylin with Apache License 2.0
public static JobStatusEnum parseToJobStatus(ExecutableState state) {
    switch (state) {
    case READY:
        return JobStatusEnum.PENDING;
    case RUNNING:
        return JobStatusEnum.RUNNING;
    case ERROR:
        return JobStatusEnum.ERROR;
    case DISCARDED:
        return JobStatusEnum.DISCARDED;
    case SUCCEED:
        return JobStatusEnum.FINISHED;
    case STOPPED:
        return JobStatusEnum.STOPPED;
    default:
        throw new RuntimeException("invalid state:" + state);
    }
}
 
Example #8
Source File: DistributedScheduler.java    From kylin with Apache License 2.0
private void resumeAllRunningJobs() {
    for (final String id : executableManager.getAllJobIds()) {
        final Output output = executableManager.getOutput(id);
        AbstractExecutable executable = executableManager.getJob(id);
        if (output.getState() == ExecutableState.RUNNING && executable instanceof DefaultChainedExecutable) {
            try {
                if (!jobLock.isLocked(getLockPath(executable.getId()))) {
                    executableManager.resumeRunningJobForce(executable.getId());
                    fetcherPool.schedule(fetcher, 0, TimeUnit.SECONDS);
                }
            } catch (Exception e) {
                logger.error("resume the job " + id + " fail in server: " + serverName, e);
            }
        }
    }
}
 
Example #9
Source File: JobService.java    From kylin-on-parquet-v2 with Apache License 2.0
public List<JobInstance> innerSearchCheckpointJobs(final String cubeName, final String jobName,
        final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
    // TODO: use cache of jobs for this method
    // prepare time range
    Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
    calendar.setTime(new Date());
    long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
    long timeEndInMillis = Long.MAX_VALUE;
    Set<ExecutableState> states = convertStatusEnumToStates(statusList);
    final Map<String, Output> allOutputs = getExecutableManager().getAllOutputs(timeStartInMillis, timeEndInMillis);

    return Lists
            .newArrayList(FluentIterable
                    .from(innerSearchCheckpointJobs(cubeName, jobName, states, timeStartInMillis, timeEndInMillis,
                            allOutputs, false, projectName))
                    .transform(new Function<CheckpointExecutable, JobInstance>() {
                        @Override
                        public JobInstance apply(CheckpointExecutable checkpointExecutable) {
                            return JobInfoConverter.parseToJobInstanceQuietly(checkpointExecutable, allOutputs);
                        }
                    }));
}
 
Example #10
Source File: KylinHealthCheckJob.java    From kylin-on-parquet-v2 with Apache License 2.0
private void checkStoppedJob() throws Exception {
    reporter.log("## Cleanup stopped job");
    int staleJobThresholdInDays = config.getStaleJobThresholdInDays();
    long outdatedJobTimeCut = System.currentTimeMillis() - 1L * staleJobThresholdInDays * 24 * 60 * 60 * 1000;
    ExecutableDao executableDao = ExecutableDao.getInstance(config);
    // discard all expired ERROR or STOPPED jobs
    List<ExecutablePO> allExecutable = executableDao.getJobs();
    for (ExecutablePO executable : allExecutable) {
        long lastModified = executable.getLastModified();
        String jobStatus = executableDao.getJobOutput(executable.getUuid()).getStatus();
        if (lastModified < outdatedJobTimeCut && (ExecutableState.ERROR.toString().equals(jobStatus)
                || ExecutableState.STOPPED.toString().equals(jobStatus))) {
            // ExecutableManager.getInstance(config).discardJob(executable.getId());
            if (executable.getType().equals(CubingJob.class.getName())
                    || executable.getType().equals(CheckpointExecutable.class.getName())) {
                reporter.log("Should discard job: {}, which in ERROR/STOPPED state for {} days", executable.getId(),
                        staleJobThresholdInDays);
            } else {
                logger.warn("Unknown out of date job: {} with type: {}, which in ERROR/STOPPED state for {} days",
                        executable.getId(), executable.getType(), staleJobThresholdInDays);
            }
        }
    }
}
 
Example #11
Source File: JobInstanceExtractor.java    From kylin-on-parquet-v2 with Apache License 2.0
private JobStatusEnum parseToJobStatus(ExecutableState state) {
    switch (state) {
    case READY:
        return JobStatusEnum.PENDING;
    case RUNNING:
        return JobStatusEnum.RUNNING;
    case ERROR:
        return JobStatusEnum.ERROR;
    case DISCARDED:
        return JobStatusEnum.DISCARDED;
    case SUCCEED:
        return JobStatusEnum.FINISHED;
    case STOPPED:
    default:
        throw new RuntimeException("invalid state:" + state);
    }
}
 
Example #12
Source File: ExecutableManagerTest.java    From Kylin with Apache License 2.0
@Test
public void test() throws Exception {
    assertNotNull(service);
    BaseTestExecutable executable = new SucceedTestExecutable();
    executable.setParam("test1", "test1");
    executable.setParam("test2", "test2");
    executable.setParam("test3", "test3");
    service.addJob(executable);
    List<AbstractExecutable> result = service.getAllExecutables();
    assertEquals(1, result.size());
    AbstractExecutable another = service.getJob(executable.getId());
    assertJobEqual(executable, another);

    service.updateJobOutput(executable.getId(), ExecutableState.RUNNING, null, "test output");
    assertJobEqual(executable, service.getJob(executable.getId()));
}
 
Example #13
Source File: CubingJob.java    From kylin-on-parquet-v2 with Apache License 2.0
protected void updateMetrics(ExecutableContext context, ExecuteResult result, ExecutableState state) {
    JobMetricsFacade.JobStatisticsResult jobStats = new JobMetricsFacade.JobStatisticsResult();
    jobStats.setWrapper(getSubmitter(), getProjectName(), CubingExecutableUtil.getCubeName(getParams()), getId(),
            getJobType(), getAlgorithm() == null ? "NULL" : getAlgorithm().toString());

    if (state == ExecutableState.SUCCEED) {
        jobStats.setJobStats(findSourceSizeBytes(), findCubeSizeBytes(), getDuration(), getMapReduceWaitTime(),
                getPerBytesTimeCost(findSourceSizeBytes(), getDuration()));
        if (CubingJobTypeEnum.getByName(getJobType()) == CubingJobTypeEnum.BUILD) {
            jobStats.setJobStepStats(getTaskDurationByName(ExecutableConstants.STEP_NAME_FACT_DISTINCT_COLUMNS),
                    getTaskDurationByName(ExecutableConstants.STEP_NAME_BUILD_DICTIONARY),
                    getTaskDurationByName(ExecutableConstants.STEP_NAME_BUILD_IN_MEM_CUBE),
                    getTaskDurationByName(ExecutableConstants.STEP_NAME_CONVERT_CUBOID_TO_HFILE));
        }
    } else if (state == ExecutableState.ERROR) {
        jobStats.setJobException(result.getThrowable() != null ? result.getThrowable() : new Exception());
    }
    JobMetricsFacade.updateMetrics(jobStats);
}
 
Example #14
Source File: BuildJobSubmitterTest.java    From kylin-on-parquet-v2 with Apache License 2.0
void beforeTestTraceEarliestSegmentBuildJob() {
    // prepare dependency
    CubeSegment cubeSegment = stubCubSegment(SegmentStatusEnum.NEW, 100L, 200L);
    CubeInstance cubeInstance = stubCubeInstance(cubeSegment);

    cubeManager = stubCubeManager(cubeInstance, false);
    config = stubKylinConfig();

    Map<String, CubingJob> cubingJobMap = new HashMap<>();
    cubingJobMap.put(mockBuildJob1, stubCubingJob(ExecutableState.SUCCEED));
    cubingJobMap.put(mockBuildJob2, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob3, stubCubingJob(ExecutableState.ERROR));
    executableManager = stubExecutableManager(cubingJobMap);
    streamingCoordinator = stubStreamingCoordinator(config, cubeManager, executableManager);
    clusterManager = stubReceiverClusterManager(streamingCoordinator);
    when(streamingCoordinator.getClusterManager()).thenReturn(clusterManager);
}
 
Example #15
Source File: TableService.java    From kylin with Apache License 2.0
public void calculateCardinalityIfNotPresent(String[] tables, String submitter, String prj) throws Exception {
    // calculate cardinality for Hive source
    ProjectInstance projectInstance = getProjectManager().getProject(prj);
    if (projectInstance == null || projectInstance.getSourceType() != ISourceAware.ID_HIVE){
        return;
    }
    TableMetadataManager metaMgr = getTableManager();
    ExecutableManager exeMgt = ExecutableManager.getInstance(getConfig());
    for (String table : tables) {
        TableExtDesc tableExtDesc = metaMgr.getTableExt(table, prj);
        String jobID = tableExtDesc.getJodID();
        if (null == jobID || ExecutableState.RUNNING != exeMgt.getOutput(jobID).getState()) {
            calculateCardinality(table, submitter, prj);
        }
    }
}
 
Example #16
Source File: JobService.java    From kylin with Apache License 2.0
public List<JobInstance> innerSearchCheckpointJobs(final String cubeName, final String jobName,
        final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
    // TODO: use cache of jobs for this method
    // prepare time range
    Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
    calendar.setTime(new Date());
    long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
    long timeEndInMillis = Long.MAX_VALUE;
    Set<ExecutableState> states = convertStatusEnumToStates(statusList);
    final Map<String, Output> allOutputs = getExecutableManager().getAllOutputs(timeStartInMillis, timeEndInMillis);

    return Lists
            .newArrayList(FluentIterable
                    .from(innerSearchCheckpointJobs(cubeName, jobName, states, timeStartInMillis, timeEndInMillis,
                            allOutputs, false, projectName))
                    .transform(new Function<CheckpointExecutable, JobInstance>() {
                        @Override
                        public JobInstance apply(CheckpointExecutable checkpointExecutable) {
                            return JobInfoConverter.parseToJobInstanceQuietly(checkpointExecutable, allOutputs);
                        }
                    }));
}
 
Example #17
Source File: JobService.java    From Kylin with Apache License 2.0
private List<JobInstance> listCubeJobInstance(final String cubeName, final String projectName, List<JobStatusEnum> statusList) {
    Set<ExecutableState> states;
    if (statusList == null || statusList.isEmpty()) {
        states = EnumSet.allOf(ExecutableState.class);
    } else {
        states = Sets.newHashSet();
        for (JobStatusEnum status : statusList) {
            states.add(parseToExecutableState(status));
        }
    }
    return Lists.newArrayList(FluentIterable.from(listAllCubingJobs(cubeName, projectName, states)).transform(new Function<CubingJob, JobInstance>() {
        @Override
        public JobInstance apply(CubingJob cubingJob) {
            return parseToJobInstance(cubingJob);
        }
    }));
}
 
Example #18
Source File: DefaultSchedulerTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testIllegalState() throws Exception {
    logger.info("testIllegalState");
    DefaultChainedExecutable job = new DefaultChainedExecutable();
    BaseTestExecutable task1 = new SucceedTestExecutable();
    BaseTestExecutable task2 = new RunningTestExecutable();
    job.addTask(task1);
    job.addTask(task2);
    execMgr.addJob(job);
    ExecutableManager.getInstance(KylinConfig.getInstanceFromEnv()).updateJobOutput(task2.getId(),
            ExecutableState.RUNNING, null, null);
    waitForJobFinish(job.getId(), MAX_WAIT_TIME);
    Assert.assertEquals(ExecutableState.ERROR, execMgr.getOutput(job.getId()).getState());
    Assert.assertEquals(ExecutableState.SUCCEED, execMgr.getOutput(task1.getId()).getState());
    Assert.assertEquals(ExecutableState.ERROR, execMgr.getOutput(task2.getId()).getState());
}
 
Example #19
Source File: DefaultSchedulerTest.java    From kylin with Apache License 2.0
@Test
public void testDiscard() throws Exception {
    logger.info("testDiscard");
    DefaultChainedExecutable job = new DefaultChainedExecutable();
    SelfStopExecutable task1 = new SelfStopExecutable();
    job.addTask(task1);
    execMgr.addJob(job);
    Thread.sleep(1100); // give time to launch job/task1 
    waitForJobStatus(job.getId(), ExecutableState.RUNNING, 500);
    execMgr.discardJob(job.getId());
    waitForJobFinish(job.getId(), MAX_WAIT_TIME);
    Assert.assertEquals(ExecutableState.DISCARDED, execMgr.getOutput(job.getId()).getState());
    Assert.assertEquals(ExecutableState.DISCARDED, execMgr.getOutput(task1.getId()).getState());
    task1.waitForDoWork();
}
 
Example #20
Source File: BuildCubeWithEngine.java    From kylin with Apache License 2.0
private Boolean mergeSegment(String cubeName, long startDate, long endDate) throws Exception {
    CubeSegment segment = cubeManager.mergeSegments(cubeManager.getCube(cubeName), new TSRange(startDate, endDate),
            null, true);
    DefaultChainedExecutable job = EngineFactory.createBatchMergeJob(segment, "TEST");
    jobService.addJob(job);
    ExecutableState state = waitForJob(job.getId());
    return Boolean.valueOf(ExecutableState.SUCCEED == state);
}
 
Example #21
Source File: SparkExecutableLivy.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
protected ExecuteResult onResumed(String appId, ExecutableManager mgr) throws ExecuteException {
    Map<String, String> info = new HashMap<>();
    try {
        logger.info("livy spark_job_id:" + appId + " resumed");
        info.put(ExecutableConstants.SPARK_JOB_ID, appId);

        while (!isPaused() && !isDiscarded()) {
            String status = getAppState(appId);

            if (Strings.isNullOrEmpty(status) || LivyStateEnum.dead.name().equalsIgnoreCase(status)
                    || LivyStateEnum.error.name().equalsIgnoreCase(status)
                    || LivyStateEnum.shutting_down.name().equalsIgnoreCase(status)) {
                mgr.updateJobOutput(getId(), ExecutableState.ERROR, null, appId + " has failed");
                return new ExecuteResult(ExecuteResult.State.FAILED, appId + " has failed");
            }

            if (LivyStateEnum.success.name().equalsIgnoreCase(status)) {
                mgr.addJobInfo(getId(), info);
                return new ExecuteResult(ExecuteResult.State.SUCCEED, appId + " has finished");
            }

            Thread.sleep(5000);
        }

        killAppRetry(appId);

        if (isDiscarded()) {
            return new ExecuteResult(ExecuteResult.State.DISCARDED, appId + " is discarded");
        } else {
            return new ExecuteResult(ExecuteResult.State.STOPPED, appId + " is stopped");
        }

    } catch (Exception e) {
        logger.error("error run spark job:", e);
        return new ExecuteResult(ExecuteResult.State.ERROR, e.getLocalizedMessage());
    }

}
 
Example #22
Source File: NManualBuildAndQueryTest.java    From kylin-on-parquet-v2 with Apache License 2.0
private void buildTwoSegementAndMerge(String cubeName) throws Exception {
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    CubeManager cubeMgr = CubeManager.getInstance(config);
    Assert.assertTrue(config.getHdfsWorkingDirectory().startsWith("file:"));

    // cleanup all segments first
    cleanupSegments(cubeName);

    /**
     * Round 1: build 2 segments
     */
    ExecutableState state;
    state = buildCuboid(cubeName, new SegmentRange.TSRange(dateToLong("2010-01-01"), dateToLong("2012-01-01")));
    Assert.assertEquals(ExecutableState.SUCCEED, state);

    state = buildCuboid(cubeName, new SegmentRange.TSRange(dateToLong("2012-01-01"), dateToLong("2015-01-01")));
    Assert.assertEquals(ExecutableState.SUCCEED, state);

    /**
     * Round2. Merge two segments
     */
    state = mergeSegments(cubeName, dateToLong("2010-01-01"), dateToLong("2015-01-01"), false);
    Assert.assertEquals(ExecutableState.SUCCEED, state);

    /**
     * validate cube segment info
     */
    CubeSegment firstSegment = cubeMgr.reloadCube(cubeName).getSegments().get(0);

    Assert.assertEquals(new SegmentRange.TSRange(dateToLong("2010-01-01"), dateToLong("2015-01-01")),
            firstSegment.getSegRange());
}
 
Example #23
Source File: JobService.java    From kylin-on-parquet-v2 with Apache License 2.0
public List<JobSearchResult> innerSearchCubingJobsV2(final String cubeName, final String jobName,
        final String projectName, final List<JobStatusEnum> statusList, final JobTimeFilterEnum timeFilter) {
    if (null == projectName) {
        aclEvaluate.checkIsGlobalAdmin();
    } else {
        aclEvaluate.checkProjectOperationPermission(projectName);
    }
    // prepare time range
    Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.ROOT);
    calendar.setTime(new Date());
    long timeStartInMillis = getTimeStartInMillis(calendar, timeFilter);
    long timeEndInMillis = Long.MAX_VALUE;
    Set<ExecutableState> states = convertStatusEnumToStates(statusList);
    final Map<String, ExecutableOutputPO> allOutputDigests = getExecutableManager()
            .getAllOutputDigests(timeStartInMillis, timeEndInMillis);
    return Lists
            .newArrayList(FluentIterable
                    .from(innerSearchCubingJobsV2(cubeName, jobName, states, timeStartInMillis, timeEndInMillis,
                            allOutputDigests, false, projectName))
                    .transform(new Function<CubingJob, JobSearchResult>() {
                        @Override
                        public JobSearchResult apply(CubingJob cubingJob) {
                            return JobInfoConverter.parseToJobSearchResult(cubingJob, allOutputDigests);
                        }
                    }).filter(new Predicate<JobSearchResult>() {
                        @Override
                        public boolean apply(@Nullable JobSearchResult input) {
                            return input != null;
                        }
                    }));
}
 
Example #24
Source File: DefaultSchedulerTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testSingleTaskJob() throws Exception {
    logger.info("testSingleTaskJob");
    DefaultChainedExecutable job = new DefaultChainedExecutable();
    BaseTestExecutable task1 = new SucceedTestExecutable();
    job.addTask(task1);
    execMgr.addJob(job);
    waitForJobFinish(job.getId(), MAX_WAIT_TIME);
    Assert.assertEquals(ExecutableState.SUCCEED, execMgr.getOutput(job.getId()).getState());
    Assert.assertEquals(ExecutableState.SUCCEED, execMgr.getOutput(task1.getId()).getState());
}
 
Example #25
Source File: BuildCubeWithEngine.java    From kylin with Apache License 2.0
protected ExecutableState waitForJob(String jobId) {
    while (true) {
        AbstractExecutable job = jobService.getJob(jobId);
        if (job.getStatus() == ExecutableState.SUCCEED || job.getStatus() == ExecutableState.ERROR) {
            return job.getStatus();
        } else {
            try {
                Thread.sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    }
}
 
Example #26
Source File: JobServiceTest.java    From kylin with Apache License 2.0
@Test
public void testExceptionOnLostJobOutput() {
    ExecutableManager manager = ExecutableManager.getInstance(jobService.getConfig());
    AbstractExecutable executable = new TestJob();
    manager.addJob(executable);
    List<CubingJob> jobs = jobService.innerSearchCubingJobs("cube", "jobName",
            Collections.<ExecutableState> emptySet(), 0, Long.MAX_VALUE, Collections.<String, Output> emptyMap(),
            true, "project");
    Assert.assertEquals(0, jobs.size());
}
 
Example #27
Source File: DefaultScheduler.java    From Kylin with Apache License 2.0
@Override
public void run() {
    // logger.debug("Job Fetcher is running...");
    Map<String, Executable> runningJobs = context.getRunningJobs();
    if (runningJobs.size() >= jobEngineConfig.getMaxConcurrentJobLimit()) {
        logger.warn("There are too many jobs running, Job Fetch will wait until next schedule time");
        return;
    }

    int nRunning = 0, nReady = 0, nOthers = 0;
    for (final String id : executableManager.getAllJobIds()) {
        if (runningJobs.containsKey(id)) {
            // logger.debug("Job id:" + id + " is already running");
            nRunning++;
            continue;
        }
        final Output output = executableManager.getOutput(id);
        if ((output.getState() != ExecutableState.READY)) {
            // logger.debug("Job id:" + id + " not runnable");
            nOthers++;
            continue;
        }
        nReady++;
        AbstractExecutable executable = executableManager.getJob(id);
        String jobDesc = executable.toString();
        logger.info(jobDesc + " prepare to schedule");
        try {
            context.addRunningJob(executable);
            jobPool.execute(new JobRunner(executable));
            logger.info(jobDesc + " scheduled");
        } catch (Exception ex) {
            context.removeRunningJob(executable);
            logger.warn(jobDesc + " fail to schedule", ex);
        }
    }
    logger.info("Job Fetcher: " + nRunning + " running, " + runningJobs.size() + " actual running, " + nReady + " ready, " + nOthers + " others");
}
 
Example #28
Source File: MailNotificationUtil.java    From kylin-on-parquet-v2 with Apache License 2.0
private static String getMailTemplateKey(ExecutableState state) {
    switch (state) {
    case ERROR:
        return JOB_ERROR;
    case DISCARDED:
        return JOB_DISCARD;
    case SUCCEED:
        return JOB_SUCCEED;
    default:
        return null;
    }
}
 
Example #29
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0
@Test
public void testParseToJobStatusReturnsJobStatusFinished() {
    ExecutableState executableState = ExecutableState.SUCCEED;
    JobStatusEnum jobStatusEnum = JobInfoConverter.parseToJobStatus(executableState);

    assertEquals(4, jobStatusEnum.getCode());
    assertEquals(JobStatusEnum.FINISHED, jobStatusEnum);
}
 
Example #30
Source File: JobInfoConverterTest.java    From kylin with Apache License 2.0
@Test
public void testParseToJobStepStatusReturnsJobStepStatusRunning() {
    ExecutableState executableState = ExecutableState.RUNNING;
    JobStepStatusEnum jobStepStatusEnum = JobInfoConverter.parseToJobStepStatus(executableState);

    assertEquals(2, jobStepStatusEnum.getCode());
    assertFalse(jobStepStatusEnum.isComplete());
    assertFalse(jobStepStatusEnum.isRunable());
    assertEquals(JobStepStatusEnum.RUNNING, jobStepStatusEnum);
}