org.apache.helix.task.WorkflowConfig Java Examples

The following examples show how to use org.apache.helix.task.WorkflowConfig. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: TaskTestUtil.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a recurring job queue that starts after {@code delayStart} seconds and repeats
 * every {@code recurrenceInSeconds} seconds.
 *
 * @param jobQueueName name of the queue (also used as the workflow id)
 * @param delayStart delay before the first run, in seconds
 * @param recurrenceInSeconds recurrence interval, in seconds
 * @param targetState initial target state for the workflow; ignored when null
 * @return a JobQueue.Builder pre-configured with the recurring schedule
 */
public static JobQueue.Builder buildRecurrentJobQueue(String jobQueueName, int delayStart,
    int recurrenceInSeconds, TargetState targetState) {
  WorkflowConfig.Builder workflowCfgBuilder = new WorkflowConfig.Builder(jobQueueName);
  workflowCfgBuilder.setExpiry(120000);
  if (targetState != null) {
    // BUG FIX: honor the caller-supplied target state; previously TargetState.STOP was
    // hard-coded here and the parameter was silently ignored.
    workflowCfgBuilder.setTargetState(targetState);
  }

  // Translate the delay (seconds) into a concrete start time: minute + second components.
  Calendar cal = Calendar.getInstance();
  cal.set(Calendar.MINUTE, cal.get(Calendar.MINUTE) + delayStart / 60);
  cal.set(Calendar.SECOND, cal.get(Calendar.SECOND) + delayStart % 60);
  cal.set(Calendar.MILLISECOND, 0);
  ScheduleConfig scheduleConfig =
      ScheduleConfig.recurringFromDate(cal.getTime(), TimeUnit.SECONDS, recurrenceInSeconds);
  workflowCfgBuilder.setScheduleConfig(scheduleConfig);
  return new JobQueue.Builder(jobQueueName).setWorkflowConfig(workflowCfgBuilder.build());
}
 
Example #2
Source File: ClusterStatusMonitor.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Resets all per-type job gauges and recomputes them from the current workflow snapshot
 * held by the controller cache.
 *
 * @param cache controller data provider holding workflow configs and contexts
 */
public void refreshJobsStatus(WorkflowControllerDataProvider cache) {
  // Zero out every per-type gauge before re-aggregating from the cache.
  for (JobMonitor monitor : _perTypeJobMonitorMap.values()) {
    monitor.resetJobGauge();
  }
  for (String workflowName : cache.getWorkflowConfigMap().keySet()) {
    if (workflowName.isEmpty()) {
      continue;
    }
    WorkflowConfig config = cache.getWorkflowConfig(workflowName);
    if (config == null) {
      continue;
    }
    WorkflowContext context = cache.getWorkflowContext(workflowName);
    for (String jobName : config.getJobDag().getAllNodes()) {
      // A missing context means the workflow has not started yet.
      TaskState state =
          (context == null) ? TaskState.NOT_STARTED : context.getJobState(jobName);
      updateJobGauges(
          config.getJobTypes() == null ? null : config.getJobTypes().get(jobName), state);
    }
  }
}
 
Example #3
Source File: TestQuotaBasedScheduling.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Tests whether jobs can run successfully without quotaTypes or quota configuration defined in
 * ClusterConfig. This test is to ensure backward-compatibility. This test must go first because
 * we want to make sure there is no quota config set anywhere.
 * @throws InterruptedException
 */
@Test
public void testSchedulingWithoutQuota() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName);
  WorkflowConfig.Builder configBuilder = new WorkflowConfig.Builder(workflowName);
  configBuilder.setAllowOverlapJobAssignment(true);
  workflowBuilder.setWorkflowConfig(configBuilder.build());

  // 10 single-task jobs with no quota type set.
  for (int i = 0; i < 10; i++) {
    List<TaskConfig> taskConfigs = new ArrayList<>();
    taskConfigs.add(new TaskConfig("ShortTask", new HashMap<>()));
    // FIX: renamed misspelled local "jobConfigBulider" -> "jobConfigBuilder".
    JobConfig.Builder jobConfigBuilder = new JobConfig.Builder().setCommand(JOB_COMMAND)
        .addTaskConfigs(taskConfigs).setJobCommandConfigMap(_jobCommandMap);
    workflowBuilder.addJob("JOB" + i, jobConfigBuilder);
  }

  _driver.start(workflowBuilder.build());
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);

  // PERF FIX: fetch the workflow context once instead of once per loop iteration; the
  // workflow is already COMPLETED, so the context no longer changes.
  WorkflowContext context = _driver.getWorkflowContext(workflowName);
  for (int i = 0; i < 10; i++) {
    String jobName = workflowName + "_" + "JOB" + i;
    Assert.assertEquals(context.getJobState(jobName), TaskState.COMPLETED);
  }
}
 
Example #4
Source File: TestQuotaBasedScheduling.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Helper method for creating custom workflows.
 * @param workflowName name of the workflow to build
 * @param shouldOverlapJobAssign whether overlapping job assignment is allowed
 * @param quotaType quota (job) type applied to every job
 * @param numJobs number of jobs to add
 * @param numTasks number of tasks per job
 * @param taskType command name used for each task
 * @return a workflow per parameters given
 */
private Workflow createWorkflow(String workflowName, boolean shouldOverlapJobAssign,
    String quotaType, int numJobs, int numTasks, String taskType) {
  Workflow.Builder builder = new Workflow.Builder(workflowName);
  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(workflowName);
  cfgBuilder.setAllowOverlapJobAssignment(shouldOverlapJobAssign);
  builder.setWorkflowConfig(cfgBuilder.build());

  for (int jobIdx = 0; jobIdx < numJobs; jobIdx++) {
    // Each job carries numTasks identical tasks of the requested type.
    List<TaskConfig> tasks = new ArrayList<>();
    for (int taskIdx = 0; taskIdx < numTasks; taskIdx++) {
      Map<String, String> taskCfg = new HashMap<>();
      tasks.add(new TaskConfig(taskType, taskCfg));
    }
    JobConfig.Builder jobCfg = new JobConfig.Builder().setCommand(JOB_COMMAND)
        .setJobCommandConfigMap(_jobCommandMap).addTaskConfigs(tasks).setJobType(quotaType);
    builder.addJob(workflowName + "_" + jobIdx, jobCfg);
  }
  return builder.build();
}
 
Example #5
Source File: TestBatchAddJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Starts 10 concurrent job submitters against one queue, waits until all 100 jobs are in
 * the DAG, then verifies jobs from the same submitter were enqueued contiguously.
 */
@Test
public void testBatchAddJobs() throws Exception {
  TaskDriver driver = new TaskDriver(_gZkClient, CLUSTER_NAME);
  driver.createQueue(new JobQueue.Builder(QUEUE_NAME).build());
  for (int i = 0; i < 10; i++) {
    _submitJobTasks.add(new SubmitJobTask(ZK_ADDR, i));
    _submitJobTasks.get(i).start();
  }

  // BUG FIX: the refreshed config was previously fetched and discarded, so the loop
  // condition kept evaluating a stale snapshot and could spin forever. Re-assign it.
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  while (workflowConfig.getJobDag().getAllNodes().size() < 100) {
    Thread.sleep(50);
    workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  }

  // Walk the DAG chain: the submitter prefix may only change right after that
  // submitter's last job (name ending in '9').
  JobDag dag = workflowConfig.getJobDag();
  String currentJob = dag.getAllNodes().iterator().next();
  while (dag.getDirectChildren(currentJob).size() > 0) {
    String childJob = dag.getDirectChildren(currentJob).iterator().next();
    if (!getPrefix(currentJob).equals(getPrefix(childJob))
        && currentJob.charAt(currentJob.length() - 1) != '9') {
      Assert.fail();
    }
    currentJob = childJob;
  }
}
 
Example #6
Source File: ClusterStatusMonitor.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Resets all per-type workflow gauges and recomputes them for every non-recurring
 * workflow currently in the controller cache.
 *
 * @param cache controller data provider holding workflow configs and contexts
 */
public void refreshWorkflowsStatus(WorkflowControllerDataProvider cache) {
  for (WorkflowMonitor monitor : _perTypeWorkflowMonitorMap.values()) {
    monitor.resetGauges();
  }

  Map<String, WorkflowConfig> configs = cache.getWorkflowConfigMap();
  for (Map.Entry<String, WorkflowConfig> entry : configs.entrySet()) {
    String workflowName = entry.getKey();
    // Skip recurring templates and the unnamed placeholder entry.
    if (entry.getValue().isRecurring() || workflowName.isEmpty()) {
      continue;
    }
    WorkflowContext context = cache.getWorkflowContext(workflowName);
    // A missing context means the workflow has not started yet.
    TaskState state =
        (context == null) ? TaskState.NOT_STARTED : context.getWorkflowState();
    updateWorkflowGauges(entry.getValue(), state);
  }
}
 
Example #7
Source File: TestWorkflowTermination.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that a workflow-level timeout configured on a job queue does not terminate the
 * queue: both jobs complete and the queue itself stays IN_PROGRESS past the timeout.
 */
@Test
public void testJobQueueNotApplyTimeout() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  long timeout = 1000;
  // Jobs are configured to finish successfully.
  JobConfig.Builder jobBuilder = createJobConfigBuilder(queueName, false, 10);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig queueConfig = new WorkflowConfig.Builder(queueName).setTimeout(timeout)
      .setWorkFlowType(WORKFLOW_TYPE).build();
  queueBuilder.setWorkflowConfig(queueConfig);
  queueBuilder.enqueueJob(JOB_NAME, jobBuilder);
  queueBuilder.enqueueJob(JOB_NAME + 1, jobBuilder);

  _driver.start(queueBuilder.build());

  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, JOB_NAME),
      TaskState.COMPLETED);
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, JOB_NAME + 1),
      TaskState.COMPLETED);

  // Let the configured timeout elapse, then confirm the queue was not timed out.
  Thread.sleep(timeout);
  _driver.pollForWorkflowState(queueName, 10000L, TaskState.IN_PROGRESS);
}
 
Example #8
Source File: TestEnqueueJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Enqueues jobs into a queue one at a time — waiting for the previous job, stopping the
 * queue, adding the next job, then resuming — and verifies the final job completes.
 */
@Test
public void testJobQueueAddingJobsOneByOne() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig.Builder cfgBuilder =
      new WorkflowConfig.Builder().setWorkflowId(queueName).setParallelJobs(1);
  queueBuilder.setWorkflowConfig(cfgBuilder.build());
  _driver.start(queueBuilder.build());
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setCommand(MockTask.TASK_COMMAND)
      .setMaxAttemptsPerTask(2);
  _driver.enqueueJob(queueName, "JOB0", jobBuilder);
  for (int i = 1; i < 5; i++) {
    // Wait for the previous job, pause the queue, add the next job, then resume.
    _driver.pollForJobState(queueName,
        TaskUtil.getNamespacedJobName(queueName, "JOB" + (i - 1)), 10000L, TaskState.COMPLETED);
    _driver.waitToStop(queueName, 5000L);
    _driver.enqueueJob(queueName, "JOB" + i, jobBuilder);
    _driver.resume(queueName);
  }

  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 4),
      TaskState.COMPLETED);
}
 
Example #9
Source File: TestWorkflowAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies the workflow GET endpoint returns the config, context, and last-scheduled-task
 * sections, and that the returned WorkflowID matches the requested workflow.
 */
@Test(dependsOnMethods = "testGetWorkflows")
public void testGetWorkflow() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  String body = get("clusters/" + CLUSTER_NAME + "/workflows/" + WORKFLOW_NAME, null,
      Response.Status.OK.getStatusCode(), true);
  JsonNode node = OBJECT_MAPPER.readTree(body);
  Assert.assertNotNull(node.get(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name()));
  Assert.assertNotNull(node.get(WorkflowAccessor.WorkflowProperties.WorkflowContext.name()));

  TaskExecutionInfo lastScheduledTask = OBJECT_MAPPER
      .treeToValue(node.get(WorkflowAccessor.WorkflowProperties.LastScheduledTask.name()),
          TaskExecutionInfo.class);
  // IMPROVEMENT: assertEquals reports both values on failure, unlike assertTrue(a.equals(b))
  // which only prints "expected true".
  Assert.assertEquals(lastScheduledTask,
      new TaskExecutionInfo(null, null, null, TaskExecutionInfo.TIMESTAMP_NOT_SET));
  String workflowId =
      node.get(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name()).get("WorkflowID")
          .getTextValue();
  Assert.assertEquals(workflowId, WORKFLOW_NAME);
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #10
Source File: TestEnqueueJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Stops the queue, enqueues five jobs in one batch, resumes, and verifies the last job
 * completes.
 */
@Test
public void testJobQueueAddingJobsAtSametime() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig.Builder cfgBuilder =
      new WorkflowConfig.Builder().setWorkflowId(queueName).setParallelJobs(1);
  queueBuilder.setWorkflowConfig(cfgBuilder.build());
  _driver.start(queueBuilder.build());

  // Pause the queue, then add every job before resuming so they are all present at once.
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setCommand(MockTask.TASK_COMMAND)
      .setMaxAttemptsPerTask(2);
  _driver.waitToStop(queueName, 5000L);
  for (int i = 0; i < 5; i++) {
    _driver.enqueueJob(queueName, "JOB" + i, jobBuilder);
  }
  _driver.resume(queueName);

  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 4),
      TaskState.COMPLETED);
}
 
Example #11
Source File: TestRecurringJobQueue.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Creates a recurrent queue in STOP state, verifies it is stopped, resumes it, and waits
 * for the first scheduled run to complete.
 */
@Test
public void testCreateStoppedQueue() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();

  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder =
      TaskTestUtil.buildRecurrentJobQueue(queueName, 0, 600000, TargetState.STOP);
  createAndEnqueueJob(queueBuilder, 2);

  _driver.createQueue(queueBuilder.build());
  // The queue must come up in the STOP state it was created with.
  WorkflowConfig workflowConfig = _driver.getWorkflowConfig(queueName);
  Assert.assertEquals(workflowConfig.getTargetState(), TargetState.STOP);

  _driver.resume(queueName);

  WorkflowContext wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);

  // Ensure the current schedule instance started and runs to completion.
  String scheduledQueue = wCtx.getLastScheduledSingleWorkflow();
  _driver.pollForWorkflowState(scheduledQueue, TaskState.COMPLETED);
}
 
Example #12
Source File: TestWorkflowTimeout.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that a timeout set on a job queue's workflow config does not fail the queue's
 * jobs: both enqueued jobs still complete.
 */
@Test
public void testJobQueueNotApplyTimeout() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  // Make jobs run success.
  // FIX: use the type-safe Collections.emptyMap() instead of the raw EMPTY_MAP constant.
  _jobBuilder.setWorkflow(queueName).setJobCommandConfigMap(Collections.emptyMap());
  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(queueName);
  jobQueue.setWorkflowConfig(new WorkflowConfig.Builder(queueName).setTimeout(1000).build())
      .enqueueJob(JOB_NAME, _jobBuilder).enqueueJob(JOB_NAME + 1, _jobBuilder);

  _driver.start(jobQueue.build());

  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, JOB_NAME),
      TaskState.COMPLETED);
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, JOB_NAME + 1),
      TaskState.COMPLETED);

  // Restore the delay config for subsequent tests that share _jobBuilder.
  _jobBuilder.setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
}
 
Example #13
Source File: JobAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Lists the job names belonging to a workflow.
 *
 * @param clusterId cluster the workflow lives in
 * @param workflowName workflow whose jobs are listed
 * @return 200 with a JSON array of job names, or 400 when the workflow does not exist
 */
@GET
public Response getJobs(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName) {
  TaskDriver driver = getTaskDriver(clusterId);
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(workflowName);
  if (workflowConfig == null) {
    return badRequest(String.format("Workflow %s is not found!", workflowName));
  }

  ObjectNode root = JsonNodeFactory.instance.objectNode();
  root.put(Properties.id.name(), JobProperties.Jobs.name());
  ArrayNode jobsNode = root.putArray(JobProperties.Jobs.name());

  Set<String> jobs = workflowConfig.getJobDag().getAllNodes();
  if (jobs != null) {
    jobsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(jobs));
  }
  return JSONRepresentation(root);
}
 
Example #14
Source File: TestTaskAssignmentCalculator.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * This test explicitly allows overlap job assignment.
 * @throws InterruptedException
 */
@Test
// NOTE(review): corrected — this test DOES allow multiple jobs to be assigned to one
// instance; the config below sets setAllowOverlapJobAssignment(true). The previous
// comment claimed the opposite.
public void testMultipleJobAssignmentOverlapEnabled() throws InterruptedException {
  _runCounts.clear();
  failTask = false;
  String workflowName = TestHelper.getTestMethodName();
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName);
  WorkflowConfig.Builder configBuilder = new WorkflowConfig.Builder(workflowName);
  configBuilder.setAllowOverlapJobAssignment(true);
  workflowBuilder.setWorkflowConfig(configBuilder.build());

  // 40 single-task jobs, all sharing the same command config.
  for (int i = 0; i < 40; i++) {
    List<TaskConfig> taskConfigs = Lists.newArrayListWithCapacity(1);
    taskConfigs.add(new TaskConfig("TaskOne", new HashMap<>()));
    JobConfig.Builder jobBuilder = new JobConfig.Builder().setCommand("DummyCommand")
        .addTaskConfigs(taskConfigs).setJobCommandConfigMap(_jobCommandMap);
    workflowBuilder.addJob("JOB" + i, jobBuilder);
  }

  _driver.start(workflowBuilder.build());
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);

  // With overlap enabled, all 5 participants should have executed tasks.
  Assert.assertEquals(_runCounts.size(), 5);
}
 
Example #15
Source File: TaskTestUtil.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a one-shot job queue whose first run is delayed by {@code delayStart} seconds.
 *
 * @param jobQueueName queue/workflow name
 * @param delayStart delay before the start, in seconds
 * @param failureThreshold failure threshold to set when positive; ignored otherwise
 * @param capacity maximum number of jobs the queue may hold
 * @return a JobQueue.Builder with the configured workflow settings
 */
public static JobQueue.Builder buildJobQueue(String jobQueueName, int delayStart,
    int failureThreshold, int capacity) {
  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(jobQueueName);
  cfgBuilder.setExpiry(120000);
  cfgBuilder.setCapacity(capacity);

  // Translate the delay (seconds) into a concrete one-time start timestamp.
  Calendar startTime = Calendar.getInstance();
  startTime.set(Calendar.MINUTE, startTime.get(Calendar.MINUTE) + delayStart / 60);
  startTime.set(Calendar.SECOND, startTime.get(Calendar.SECOND) + delayStart % 60);
  startTime.set(Calendar.MILLISECOND, 0);
  cfgBuilder.setScheduleConfig(ScheduleConfig.oneTimeDelayedStart(startTime.getTime()));

  if (failureThreshold > 0) {
    cfgBuilder.setFailureThreshold(failureThreshold);
  }
  return new JobQueue.Builder(jobQueueName).setWorkflowConfig(cfgBuilder.build());
}
 
Example #16
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the full JSON view of one workflow: config, context, job DAG, parent links,
 * and the last scheduled task.
 *
 * @param clusterId cluster the workflow lives in
 * @param workflowId workflow to fetch
 * @return 200 with the workflow JSON; the Jobs/ParentJobs sections are omitted when the
 *         workflow config does not exist
 */
@GET
@Path("{workflowId}")
public Response getWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId);
  WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId);

  ObjectNode root = JsonNodeFactory.instance.objectNode();
  TextNode id = JsonNodeFactory.instance.textNode(workflowId);
  root.put(Properties.id.name(), id);

  ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode();
  ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode();

  if (workflowConfig != null) {
    getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord());
  }

  if (workflowContext != null) {
    getWorkflowContextNode(workflowContextNode, workflowContext.getRecord());
  }

  root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode);
  root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode);

  // BUG FIX: guard the DAG access — workflowConfig may be null (it was null-checked
  // above) and dereferencing it unconditionally here threw an NPE for unknown workflows.
  if (workflowConfig != null) {
    JobDag jobDag = workflowConfig.getJobDag();
    ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes());
    ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents());
    root.put(WorkflowProperties.Jobs.name(), jobs);
    root.put(WorkflowProperties.ParentJobs.name(), parentJobs);
  }
  root.put(WorkflowProperties.LastScheduledTask.name(),
      OBJECT_MAPPER.valueToTree(taskDriver.getLastScheduledTaskExecutionInfo(workflowId)));
  return JSONRepresentation(root);
}
 
Example #17
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Lists the names of all workflows in the cluster.
 *
 * @param clusterId cluster to query
 * @return 200 with a JSON object mapping "Workflows" to the list of workflow names
 */
@GET
public Response getWorkflows(@PathParam("clusterId") String clusterId) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  List<String> workflowNames = new ArrayList<>(taskDriver.getWorkflows().keySet());
  Map<String, List<String>> dataMap = new HashMap<>();
  dataMap.put(WorkflowProperties.Workflows.name(), workflowNames);
  return JSONRepresentation(dataMap);
}
 
Example #18
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the workflow's config as a flat JSON object; the object is empty when the
 * workflow is unknown.
 *
 * @param clusterId cluster the workflow lives in
 * @param workflowId workflow whose config is fetched
 * @return 200 with the config JSON (possibly empty)
 */
@GET
@Path("{workflowId}/configs")
public Response getWorkflowConfig(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId) {
  ObjectNode configNode = JsonNodeFactory.instance.objectNode();
  WorkflowConfig config = getTaskDriver(clusterId).getWorkflowConfig(workflowId);
  if (config != null) {
    getWorkflowConfigNode(configNode, config.getRecord());
  }
  return JSONRepresentation(configNode);
}
 
Example #19
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Copies a workflow config's simple fields into the given JSON node, skipping the raw
 * Dag field (the DAG is exposed separately as Jobs/ParentJobs).
 *
 * @param workflowConfigNode target JSON object to populate
 * @param record ZNRecord backing the workflow config
 */
private void getWorkflowConfigNode(ObjectNode workflowConfigNode, ZNRecord record) {
  for (Map.Entry<String, String> entry : record.getSimpleFields().entrySet()) {
    // BUG FIX: the key (a String) was compared to the Dag enum constant itself, which can
    // never be equal, so the Dag field was never actually filtered. Compare to .name().
    if (!entry.getKey().equals(WorkflowConfig.WorkflowConfigProperty.Dag.name())) {
      workflowConfigNode.put(entry.getKey(), JsonNodeFactory.instance.textNode(entry.getValue()));
    }
  }
}
 
Example #20
Source File: TestTaskThreadLeak.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Runs five target-resource jobs through a queue and asserts the task thread pool grows
 * by no more than its configured size (i.e. task threads are not leaked).
 */
@Test
public void testTaskThreadCount() throws InterruptedException {
  String queueName = "myTestJobQueue";
  JobQueue.Builder queueBuilder = new JobQueue.Builder(queueName);
  String lastJob = null;
  for (int i = 0; i < 5; i++) {
    // One FULL_AUTO resource plus one job targeting it, per iteration.
    String db = TestHelper.getTestMethodName() + "_" + i;
    _gSetupTool.addResourceToCluster(CLUSTER_NAME, db, 20, MASTER_SLAVE_STATE_MODEL,
        IdealState.RebalanceMode.FULL_AUTO.name());
    _gSetupTool.rebalanceStorageCluster(CLUSTER_NAME, db, 1);
    JobConfig.Builder jobBuilder = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(db)
        .setNumConcurrentTasksPerInstance(100);
    String jobName = db + "_job";
    queueBuilder.addJob(jobName, jobBuilder);
    lastJob = jobName;
  }

  queueBuilder
      .setWorkflowConfig(new WorkflowConfig.Builder(queueName).setParallelJobs(10).build());
  _driver.start(queueBuilder.build());

  // Wait for the final job so every task has been scheduled at least once.
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, lastJob),
      TaskState.COMPLETED);

  int threadCountAfter = getThreadCount("TaskStateModelFactory");
  Assert.assertTrue(
      (threadCountAfter - _threadCountBefore) <= TaskStateModelFactory.TASK_THREADPOOL_SIZE + 1);
}
 
Example #21
Source File: TestWorkflowTermination.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies a workflow with a generous timeout completes within it, is cleaned up after
 * its expiry, and reports success metrics via JMX.
 */
@Test
public void testWorkflowSucceed() throws Exception {
  String workflowName = TestHelper.getTestMethodName();
  long workflowExpiry = 2000;
  long timeout = 2000;
  JobConfig.Builder jobBuilder = createJobConfigBuilder(workflowName, false, 50);
  jobBuilder.setWorkflow(workflowName);
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName)
      .setWorkflowConfig(new WorkflowConfig.Builder(workflowName).setTimeout(timeout)
          .setWorkFlowType(WORKFLOW_TYPE).build())
      .addJob(JOB_NAME, jobBuilder).setExpiry(workflowExpiry);
  _driver.start(workflowBuilder.build());

  // Timeout is longer than job finish so workflow status should be COMPLETED
  _driver.pollForWorkflowState(workflowName, 5000L, TaskState.COMPLETED);
  // FIX: fetch the workflow context once instead of twice for the same read-only data.
  WorkflowContext context = _driver.getWorkflowContext(workflowName);
  long finishTime = context.getFinishTime();
  Assert.assertTrue(context.getFinishTime() - context.getStartTime() < timeout);

  // Workflow should be cleaned up after expiry
  verifyWorkflowCleanup(workflowName, getJobNameToPoll(workflowName, JOB_NAME));
  long cleanUpTime = System.currentTimeMillis();
  Assert.assertTrue(cleanUpTime - finishTime >= workflowExpiry);

  // Success metrics must have been emitted for this workflow type.
  ObjectName objectName = getWorkflowMBeanObjectName(workflowName);
  Assert.assertEquals((long) beanServer.getAttribute(objectName, "SuccessfulWorkflowCount"), 1);
  Assert
      .assertTrue((long) beanServer.getAttribute(objectName, "MaximumWorkflowLatencyGauge") > 0);
  Assert.assertTrue((long) beanServer.getAttribute(objectName, "WorkflowLatencyCount") > 0);
}
 
Example #22
Source File: TestWorkflowJobDependency.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a two-job workflow with a parent-child dependency and verifies the child job
 * does not start before the parent finishes.
 */
@Test
public void testWorkflowWithDependencies() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  final int PARALLEL_NUM = 2;
  // Workflow setup: parallelism of 2, with job0 -> job1 dependency added below.
  WorkflowConfig.Builder cfgBuilder =
      new WorkflowConfig.Builder().setWorkflowId(workflowName).setParallelJobs(PARALLEL_NUM);
  Workflow.Builder builder = new Workflow.Builder(workflowName);
  builder.setWorkflowConfig(cfgBuilder.build());

  builder.addParentChildDependency("job" + _testDbs.get(0), "job" + _testDbs.get(1));
  for (int i = 0; i < 2; i++) {
    String jobName = "job" + _testDbs.get(i);
    JobConfig.Builder jobCfg = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(_testDbs.get(i))
        .setTargetPartitionStates(Sets.newHashSet("SLAVE", "MASTER"))
        .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);
    builder.addJob(jobName, jobCfg);
  }

  // Start the workflow and wait until it completes.
  _driver.start(builder.build());
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);

  // The dependent job must start only after its parent finished.
  JobContext parentCtx =
      _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "job" + _testDbs.get(0)));
  JobContext childCtx =
      _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "job" + _testDbs.get(1)));
  Assert.assertTrue(childCtx.getStartTime() - parentCtx.getFinishTime() >= 0L);
}
 
Example #23
Source File: TestJobAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a job queue via the REST API, enqueues a job into it, and verifies the job
 * config exists and the namespaced job appears in the queue's DAG.
 */
@Test(dependsOnMethods = "testGetJobContext")
public void testCreateJob() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());

  TaskDriver driver = getTaskDriver(CLUSTER_NAME);
  // Create a JobQueue whose workflow config mirrors the existing workflow's config.
  JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME)
      .setWorkflowConfig(driver.getWorkflowConfig(WORKFLOW_NAME));
  String configJson = OBJECT_MAPPER.writeValueAsString(Collections
      .singletonMap(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
          jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields()));
  Entity entity = Entity.entity(configJson, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity,
      Response.Status.OK.getStatusCode());

  // Enqueue a job into the freshly created queue.
  entity = Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      null, entity, Response.Status.OK.getStatusCode());

  // The job config must exist and the DAG must contain the namespaced job name.
  String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  JobConfig jobConfig = driver.getJobConfig(jobName);
  Assert.assertNotNull(jobConfig);
  WorkflowConfig queueConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  Assert.assertTrue(queueConfig.getJobDag().getAllNodes().contains(jobName));
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #24
Source File: TestRunJobsWithMissingTarget.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a chain of dependent jobs that ignore parent failures, lets an early job fail,
 * and verifies the remaining jobs still run to completion.
 */
@Test(dependsOnMethods = "testJobFailsWithMissingTarget")
public void testJobContinueUponParentJobFailure() throws Exception {
  String workflowName = TestHelper.getTestMethodName();

  // Create a workflow tolerant of up to 10 job failures.
  LOG.info("Starting job-queue: " + workflowName);
  Workflow.Builder builder = new Workflow.Builder(workflowName).setWorkflowConfig(
      new WorkflowConfig.Builder(workflowName).setFailureThreshold(10).build());
  // Chain one job per test DB; each job is configured to ignore its parent's failure.
  List<String> currentJobNames = new ArrayList<>();
  for (int i = 0; i < _numDbs; i++) {
    String jobName = "job" + _testDbs.get(i);
    JobConfig.Builder jobConfig = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(_testDbs.get(i))
        .setTargetPartitionStates(Sets.newHashSet("SLAVE"))
        .setIgnoreDependentJobFailure(true);
    builder.addJob(jobName, jobConfig);
    if (i > 0) {
      builder.addParentChildDependency("job" + _testDbs.get(i - 1), jobName);
    }
    currentJobNames.add(jobName);
  }

  _driver.start(builder.build());

  // The second job fails, but the chain keeps progressing to the last job.
  _driver.pollForJobState(workflowName,
      String.format("%s_%s", workflowName, currentJobNames.get(1)), TaskState.FAILED);
  _driver.pollForJobState(workflowName,
      String.format("%s_%s", workflowName, currentJobNames.get(currentJobNames.size() - 1)),
      TaskState.COMPLETED);

  _driver.delete(workflowName);
}
 
Example #25
Source File: JobQueuesResource.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a JSON representation listing every job queue hosted in the given cluster.
 * A resource config is treated as a job queue when it carries both the TargetState and
 * Dag simple fields and is non-terminable.
 *
 * @param clusterName cluster whose resource configs are scanned
 * @return a JSON StringRepresentation with a "JobQueues" list field
 * @throws IOException on JSON serialization failure
 */
StringRepresentation getHostedEntitiesRepresentation(String clusterName)
    throws JsonGenerationException, JsonMappingException, IOException {
  // Get all resources
  ZkClient zkClient =
      ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);
  HelixDataAccessor accessor =
      ClusterRepresentationUtil.getClusterDataAccessor(zkClient, clusterName);
  PropertyKey.Builder keyBuilder = accessor.keyBuilder();
  Map<String, HelixProperty> resourceConfigMap =
      accessor.getChildValuesMap(keyBuilder.resourceConfigs());

  // Create the result
  ZNRecord hostedEntitiesRecord = new ZNRecord("JobQueues");

  // Filter out non-workflow resources: remove entries missing TargetState or Dag, and
  // terminable (one-shot) workflows — only non-terminable queues remain.
  Iterator<Map.Entry<String, HelixProperty>> it = resourceConfigMap.entrySet().iterator();
  while (it.hasNext()) {
    Map.Entry<String, HelixProperty> e = it.next();
    HelixProperty resource = e.getValue();
    Map<String, String> simpleFields = resource.getRecord().getSimpleFields();
    // Terminable defaults to true when the field is absent.
    boolean isTerminable = resource.getRecord()
        .getBooleanField(WorkflowConfig.WorkflowConfigProperty.Terminable.name(), true);
    if (!simpleFields.containsKey(WorkflowConfig.WorkflowConfigProperty.TargetState.name())
        || !simpleFields.containsKey(WorkflowConfig.WorkflowConfigProperty.Dag.name())
        || isTerminable) {
      it.remove();
    }
  }

  // Populate the result
  List<String> allResources = Lists.newArrayList(resourceConfigMap.keySet());
  hostedEntitiesRecord.setListField("JobQueues", allResources);

  StringRepresentation representation =
      new StringRepresentation(ClusterRepresentationUtil.ZNRecordToJson(hostedEntitiesRecord),
          MediaType.APPLICATION_JSON);

  return representation;
}
 
Example #26
Source File: TestTaskRebalancerParallel.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * This test starts 4 jobs in job queue, the job all stuck, and verify that
 * (1) the number of running job does not exceed configured max allowed parallel jobs
 * (2) one instance can only be assigned to one job in the workflow
 */
@Test
public void testWhenDisallowOverlapJobAssignment() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Queue config: bounded parallelism, no overlapping instance assignment.
  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(queueName);
  cfgBuilder.setParallelJobs(PARALLEL_COUNT);
  cfgBuilder.setAllowOverlapJobAssignment(false);

  JobQueue.Builder queueBuilder =
      new JobQueue.Builder(queueName).setWorkflowConfig(cfgBuilder.build());
  _driver.createQueue(queueBuilder.build());

  // One slow job per test DB so they all stay running concurrently.
  List<JobConfig.Builder> jobConfigBuilders = new ArrayList<JobConfig.Builder>();
  for (String testDbName : _testDbs) {
    jobConfigBuilders.add(
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(testDbName)
            .setTargetPartitionStates(Collections.singleton("SLAVE"))
            .setJobCommandConfigMap(Collections.singletonMap(MockTask.JOB_DELAY, "1000")));
  }

  // Enqueue every job while the queue is stopped, then resume so they start together.
  _driver.stop(queueName);
  for (int i = 0; i < jobConfigBuilders.size(); ++i) {
    _driver.enqueueJob(queueName, "job_" + (i + 1), jobConfigBuilders.get(i));
  }
  _driver.resume(queueName);
  Thread.sleep(1000L);
  Assert.assertTrue(TaskTestUtil.pollForWorkflowParallelState(_driver, queueName));
}
 
Example #27
Source File: TestWorkflowTimeout.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test
public void testWorkflowTimeoutWhenWorkflowCompleted() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  _jobBuilder.setWorkflow(workflowName);
  _jobBuilder.setJobCommandConfigMap(Collections.<String, String> emptyMap());
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName)
      .setWorkflowConfig(new WorkflowConfig.Builder(workflowName).setTimeout(0).build())
      .addJob(JOB_NAME, _jobBuilder).setExpiry(2000L);

  _driver.start(workflowBuilder.build());
  // Pause the queue
  Thread.sleep(2500);
  Assert.assertNull(_driver.getWorkflowConfig(workflowName));
  Assert.assertNull(_driver.getJobContext(workflowName));
}
 
Example #28
Source File: TestWorkflowTimeout.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that a paused workflow still hits its configured timeout and transitions to
 * TIMED_OUT.
 */
@Test
public void testWorkflowPausedTimeout() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  _jobBuilder.setWorkflow(workflowName);
  WorkflowConfig timeoutConfig =
      new WorkflowConfig.Builder(workflowName).setTimeout(5000).build();
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName)
      .setWorkflowConfig(timeoutConfig)
      .addJob(JOB_NAME, _jobBuilder);

  _driver.start(workflowBuilder.build());

  // Pause the workflow, then expect the timeout to fire anyway.
  _driver.waitToStop(workflowName, 10000L);
  _driver.pollForWorkflowState(workflowName, 10000L, TaskState.TIMED_OUT);
}
 
Example #29
Source File: TestWorkflowTimeout.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies a running workflow transitions to TIMED_OUT once its 1s timeout elapses.
 */
@Test
public void testWorkflowRunningTime() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  _jobBuilder.setWorkflow(workflowName);
  WorkflowConfig shortTimeoutConfig =
      new WorkflowConfig.Builder(workflowName).setTimeout(1000).build();
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName)
      .setWorkflowConfig(shortTimeoutConfig)
      .addJob(JOB_NAME, _jobBuilder);
  _driver.start(workflowBuilder.build());

  _driver.pollForWorkflowState(workflowName, 10000L, TaskState.TIMED_OUT);
}
 
Example #30
Source File: TestTaskRebalancerStopResume.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Asserts that the given namespaced job is fully absent from the queue's DAG: not a
 * node, not a child, and not a parent.
 *
 * @param queueName queue whose DAG is inspected
 * @param namedSpacedJobName fully namespaced job name expected to be gone
 */
private void verifyJobNotInQueue(String queueName, String namedSpacedJobName) {
  JobDag dag = _driver.getWorkflowConfig(queueName).getJobDag();
  Assert.assertFalse(dag.getAllNodes().contains(namedSpacedJobName));
  Assert.assertFalse(dag.getChildrenToParents().containsKey(namedSpacedJobName));
  Assert.assertFalse(dag.getParentsToChildren().containsKey(namedSpacedJobName));
}