org.apache.helix.task.JobQueue Java Examples

The following examples show how to use org.apache.helix.task.JobQueue. Vote up the examples you find helpful, or vote down the ones you don't, and follow the link above each example to view the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: TestZkConnectionLost.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Builds {@code jobCount} jobs, enqueues each one on the given queue builder, and
 * returns the generated job names in enqueue order.
 *
 * The first job targets the MASTER state; every following job targets SLAVE.
 */
private List<String> createAndEnqueueJob(JobQueue.Builder queueBuild, int jobCount) {
  List<String> enqueuedNames = new ArrayList<>();
  for (int jobIndex = 0; jobIndex < jobCount; jobIndex++) {
    String stateToTarget = (jobIndex == 0) ? "MASTER" : "SLAVE";

    JobConfig.Builder jobCfg = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet(stateToTarget))
        .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "100"));
    String name = stateToTarget.toLowerCase() + "Job" + jobIndex;
    queueBuild.enqueueJob(name, jobCfg);
    enqueuedNames.add(name);
  }
  // Sanity check: exactly one name was recorded per requested job
  Assert.assertEquals(enqueuedNames.size(), jobCount);
  return enqueuedNames;
}
 
Example #2
Source File: PinotHelixTaskResourceManager.java    From incubator-pinot with Apache License 2.0 6 votes vote down vote up
/**
 * Ensure the task queue for the given task type exists, creating it if necessary and
 * blocking until its workflow context is visible.
 *
 * @param taskType Task type
 */
public void ensureTaskQueueExists(String taskType) {
  String helixJobQueueName = getHelixJobQueueName(taskType);

  // A missing workflow config means the queue has not been created yet
  if (_taskDriver.getWorkflowConfig(helixJobQueueName) == null) {
    LOGGER.info("Creating task queue: {} for task type: {}", helixJobQueueName, taskType);

    // Set full parallelism
    // Don't allow overlap job assignment so that we can control number of concurrent tasks per instance
    WorkflowConfig fullParallelismConfig =
        new WorkflowConfig.Builder().setParallelJobs(Integer.MAX_VALUE).build();
    _taskDriver.createQueue(
        new JobQueue.Builder(helixJobQueueName).setWorkflowConfig(fullParallelismConfig).build());
  }

  // Wait until task queue context shows up
  while (_taskDriver.getWorkflowContext(helixJobQueueName) == null) {
    Uninterruptibles.sleepUninterruptibly(100, TimeUnit.MILLISECONDS);
  }
}
 
Example #3
Source File: TestRecurringJobQueue.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Creates {@code jobCount} jobs (first targeting MASTER, the rest SLAVE), enqueues
 * them on the supplied builder, and returns their names in order.
 */
private List<String> createAndEnqueueJob(JobQueue.Builder queueBuild, int jobCount) {
  List<String> jobNames = new ArrayList<>();
  int idx = 0;
  while (idx < jobCount) {
    String partitionState = (idx == 0) ? "MASTER" : "SLAVE";

    JobConfig.Builder cfg = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
        .setTargetPartitionStates(Sets.newHashSet(partitionState));
    String name = partitionState.toLowerCase() + "Job" + idx;
    queueBuild.enqueueJob(name, cfg);
    jobNames.add(name);
    idx++;
  }
  // One recorded name per requested job
  Assert.assertEquals(jobNames.size(), jobCount);
  return jobNames;
}
 
Example #4
Source File: TestRecurringJobQueue.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testCreateStoppedQueue() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();

  // Build a recurrent queue whose initial target state is STOP, with two jobs on it
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuild =
      TaskTestUtil.buildRecurrentJobQueue(queueName, 0, 600000, TargetState.STOP);
  createAndEnqueueJob(queueBuild, 2);

  _driver.createQueue(queueBuild.build());
  // The queue must come up stopped, exactly as configured
  WorkflowConfig workflowConfig = _driver.getWorkflowConfig(queueName);
  Assert.assertEquals(workflowConfig.getTargetState(), TargetState.STOP);

  // Resuming the stopped queue should kick off the current schedule
  _driver.resume(queueName);

  WorkflowContext wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);

  // ensure current schedule is started
  String scheduledQueue = wCtx.getLastScheduledSingleWorkflow();
  _driver.pollForWorkflowState(scheduledQueue, TaskState.COMPLETED);
}
 
Example #5
Source File: TestEnqueueJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testJobQueueAddingJobsAtSametime() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();

  // Start a queue limited to one parallel job
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig.Builder workflowCfgBuilder =
      new WorkflowConfig.Builder().setWorkflowId(queueName).setParallelJobs(1);
  _driver.start(builder.setWorkflowConfig(workflowCfgBuilder.build()).build());

  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2);

  // Stop the queue, enqueue all five jobs in one batch, then resume
  _driver.waitToStop(queueName, 5000L);
  int jobId = 0;
  while (jobId < 5) {
    _driver.enqueueJob(queueName, "JOB" + jobId, jobBuilder);
    jobId++;
  }
  _driver.resume(queueName);

  // The last job completing implies the whole batch was scheduled and ran
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 4),
      TaskState.COMPLETED);
}
 
Example #6
Source File: TestEnqueueJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testJobQueueAddingJobsOneByOne() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();

  // Queue runs at most one job at a time
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig.Builder workflowCfgBuilder =
      new WorkflowConfig.Builder().setWorkflowId(queueName).setParallelJobs(1);
  _driver.start(builder.setWorkflowConfig(workflowCfgBuilder.build()).build());

  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2);

  // Enqueue the first job directly, then add each next job only after the
  // previous one completed, pausing the queue around every enqueue.
  _driver.enqueueJob(queueName, "JOB0", jobBuilder);
  for (int jobNum = 1; jobNum < 5; jobNum++) {
    String previousJob = TaskUtil.getNamespacedJobName(queueName, "JOB" + (jobNum - 1));
    _driver.pollForJobState(queueName, previousJob, 10000L, TaskState.COMPLETED);
    _driver.waitToStop(queueName, 5000L);
    _driver.enqueueJob(queueName, "JOB" + jobNum, jobBuilder);
    _driver.resume(queueName);
  }

  // Final job completing means the whole chain ran
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 4),
      TaskState.COMPLETED);
}
 
Example #7
Source File: TestWorkflowTermination.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testJobQueueNotApplyTimeout() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  long timeout = 1000;
  // Make jobs run success
  JobConfig.Builder jobBuilder = createJobConfigBuilder(queueName, false, 10);

  // Attach a 1s workflow timeout to the queue and enqueue two jobs
  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(queueName);
  WorkflowConfig queueConfig = new WorkflowConfig.Builder(queueName)
      .setTimeout(timeout)
      .setWorkFlowType(WORKFLOW_TYPE)
      .build();
  jobQueue.setWorkflowConfig(queueConfig)
      .enqueueJob(JOB_NAME, jobBuilder)
      .enqueueJob(JOB_NAME + 1, jobBuilder);

  _driver.start(jobQueue.build());

  // Both jobs should finish normally
  for (String job : new String[] { JOB_NAME, JOB_NAME + 1 }) {
    _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, job),
        TaskState.COMPLETED);
  }

  // Let the configured timeout elapse...
  Thread.sleep(timeout);

  // ...then verify the queue keeps running: the timeout must not apply to job queues
  _driver.pollForWorkflowState(queueName, 10000L, TaskState.IN_PROGRESS);
}
 
Example #8
Source File: TestStopAndResumeQueue.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testStopAndResumeQueue() throws Exception {
  String jobQueueName = TestHelper.getTestMethodName();

  // A single short job targeting the MASTER partitions of DATABASE
  JobConfig.Builder jobBuilder0 = new JobConfig.Builder()
      .setWorkflow(jobQueueName)
      .setTargetResource(DATABASE)
      .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
      .setCommand(MockTask.TASK_COMMAND)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000"));

  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("JOB0", jobBuilder0);

  // Run the queue until its only job completes
  _driver.start(jobQueue.build());
  _driver.pollForWorkflowState(jobQueueName, TaskState.IN_PROGRESS);
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "JOB0"),
      TaskState.COMPLETED);

  // Stop, then resume with nothing left to run
  _driver.waitToStop(jobQueueName, 50000L);
  _driver.resume(jobQueueName);
  // Resume should change the workflow context's state to IN_PROGRESS even when there is no job
  // running
  _driver.pollForWorkflowState(jobQueueName, TaskState.IN_PROGRESS);
}
 
Example #9
Source File: TestBatchAddJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Spawns 10 SubmitJobTask threads that concurrently add jobs to one queue, waits until
 * the queue's DAG holds 100 jobs, then walks the DAG verifying that jobs submitted by
 * one thread (same name prefix) stay contiguous and only hand over to another prefix
 * after that thread's last job (suffix '9').
 */
@Test
public void testBatchAddJobs() throws Exception {
  TaskDriver driver = new TaskDriver(_gZkClient, CLUSTER_NAME);
  driver.createQueue(new JobQueue.Builder(QUEUE_NAME).build());
  for (int i = 0; i < 10; i++) {
    _submitJobTasks.add(new SubmitJobTask(ZK_ADDR, i));
    _submitJobTasks.get(i).start();
  }

  WorkflowConfig workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  while (workflowConfig.getJobDag().getAllNodes().size() < 100) {
    Thread.sleep(50);
    // BUG FIX: re-assign the freshly-read config. The original discarded this return
    // value, so the loop condition kept testing the stale snapshot and never saw the
    // newly enqueued jobs, spinning forever.
    workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  }

  // Walk the DAG chain from its first node; a prefix change is only legal right
  // after a submitter's final job.
  JobDag dag = workflowConfig.getJobDag();
  String currentJob = dag.getAllNodes().iterator().next();
  while (dag.getDirectChildren(currentJob).size() > 0) {
    String childJob = dag.getDirectChildren(currentJob).iterator().next();
    if (!getPrefix(currentJob).equals(getPrefix(childJob))
        && currentJob.charAt(currentJob.length() - 1) != '9') {
      Assert.fail();
    }
    currentJob = childJob;
  }
}
 
Example #10
Source File: TestWorkflowTimeout.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that a workflow-level timeout configured on a job queue does not abort the
 * queue's jobs: both enqueued jobs must COMPLETE despite the 1s timeout.
 */
@Test
public void testJobQueueNotApplyTimeout() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  // Make jobs run success
  // Collections.emptyMap() replaces the raw-typed Collections.EMPTY_MAP field:
  // same empty immutable map, but type-safe (no unchecked conversion).
  _jobBuilder.setWorkflow(queueName).setJobCommandConfigMap(Collections.emptyMap());
  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(queueName);
  jobQueue.setWorkflowConfig(new WorkflowConfig.Builder(queueName).setTimeout(1000).build())
      .enqueueJob(JOB_NAME, _jobBuilder).enqueueJob(JOB_NAME + 1, _jobBuilder);

  _driver.start(jobQueue.build());

  // Both jobs complete normally; the queue ignores the workflow timeout
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, JOB_NAME),
      TaskState.COMPLETED);
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, JOB_NAME + 1),
      TaskState.COMPLETED);

  // Add back the config
  // (the shared _jobBuilder is reused by other tests, so restore its delay setting)
  _jobBuilder.setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "99999999"));
}
 
Example #11
Source File: TestStoppingQueueFailToStop.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testStoppingQueueFailToStop() throws Exception {
  String jobQueueName = TestHelper.getTestMethodName();

  // A long-running job (100s delay) so the queue cannot stop within the wait budget
  JobConfig.Builder jobBuilder0 = new JobConfig.Builder()
      .setWorkflow(jobQueueName)
      .setTargetResource(DATABASE)
      .setTargetPartitionStates(Sets.newHashSet(MasterSlaveSMD.States.MASTER.name()))
      .setCommand(MockTask.TASK_COMMAND)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "100000"));

  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("JOB0", jobBuilder0);
  _driver.start(jobQueue.build());
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "JOB0"),
      TaskState.IN_PROGRESS);

  // waitToStop must time out (HelixException) because JOB0 keeps running
  boolean exceptionHappened = false;
  try {
    _driver.waitToStop(jobQueueName, 5000L);
  } catch (HelixException e) {
    exceptionHappened = true;
  }

  // The queue stays stuck in STOPPING rather than reaching STOPPED
  _driver.pollForWorkflowState(jobQueueName, TaskState.STOPPING);
  Assert.assertTrue(exceptionHappened);
  latch.countDown();
}
 
Example #12
Source File: TestJobFailureDependence.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testJobDependantWorkflowFailure() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);

  // One job per test DB, each targeting that DB's SLAVE partitions
  List<String> currentJobNames = new ArrayList<>();
  for (int dbIdx = 0; dbIdx < _numDbs; dbIdx++) {
    String db = _testDbs.get(dbIdx);
    JobConfig.Builder jobConfig = new JobConfig.Builder()
        .setCommand(MockTask.TASK_COMMAND)
        .setTargetResource(db)
        .setTargetPartitionStates(Sets.newHashSet("SLAVE"));
    String jobName = "job" + db;
    queueBuilder.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }

  _driver.start(queueBuilder.build());
  // Drop the resource the third job targets; that job can no longer be assigned
  _gSetupTool.dropResourceFromCluster(CLUSTER_NAME, _testDbs.get(2));

  // The job whose target resource vanished must FAIL
  String failedNamespacedJob = String.format("%s_%s", queueName, currentJobNames.get(2));
  _driver.pollForJobState(queueName, failedNamespacedJob, TaskState.FAILED);
}
 
Example #13
Source File: TestJobQueueCleanUp.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test
public void testJobQueueCleanUp() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);

  // Jobs configured (via SUCCESS_COUNT_BEFORE_FAIL — see MockTask) so they end up FAILED
  JobConfig.Builder jobBuilder = new JobConfig.Builder()
      .setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
      .setCommand(MockTask.TASK_COMMAND)
      .setMaxAttemptsPerTask(2)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.SUCCESS_COUNT_BEFORE_FAIL, "2"));
  int jobIdx = 0;
  while (jobIdx < 5) {
    builder.enqueueJob("JOB" + jobIdx, jobBuilder);
    jobIdx++;
  }

  _driver.start(builder.build());
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 4),
      TaskState.FAILED);

  // Cleanup must remove every terminated job from the queue's DAG
  _driver.cleanupQueue(queueName);
  Assert.assertEquals(_driver.getWorkflowConfig(queueName).getJobDag().size(), 0);
}
 
Example #14
Source File: TestJobQueueCleanUp.java    From helix with Apache License 2.0 6 votes vote down vote up
// Verifies that cleanupQueue() removes only terminated jobs: with JOB3 stuck
// IN_PROGRESS (very large JOB_DELAY) and JOB4 queued behind it, cleanup must
// leave exactly those two nodes in the DAG.
@Test public void testJobQueueNotCleanupRunningJobs() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);
  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2);
  // JOB0..JOB2 run with the plain config and should finish quickly
  for (int i = 0; i < 3; i++) {
    builder.enqueueJob("JOB" + i, jobBuilder);
  }
  // NOTE: jobBuilder is deliberately mutated here, so JOB3 — and JOB4 below,
  // which reuses the same builder — both carry the 1000000ms delay and stay running
  builder.enqueueJob("JOB" + 3,
      jobBuilder.setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000000")));
  builder.enqueueJob("JOB" + 4, jobBuilder);
  _driver.start(builder.build());
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 3),
      TaskState.IN_PROGRESS);
  // Cleanup while JOB3 runs: only the three completed jobs may be removed
  _driver.cleanupQueue(queueName);
  Assert.assertEquals(_driver.getWorkflowConfig(queueName).getJobDag().size(), 2);
}
 
Example #15
Source File: TaskTestUtil.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a recurrent job queue that first fires after {@code delayStart} seconds and then
 * recurs every {@code recurrenceInSeconds} seconds.
 *
 * @param jobQueueName        queue (workflow) name
 * @param delayStart          delay in seconds before the first run
 * @param recurrenceInSeconds recurrence interval in seconds
 * @param targetState         initial target state for the queue; ignored when null
 * @return a JobQueue.Builder carrying the assembled WorkflowConfig
 */
public static JobQueue.Builder buildRecurrentJobQueue(String jobQueueName, int delayStart,
    int recurrenceInSeconds, TargetState targetState) {
  WorkflowConfig.Builder workflowCfgBuilder = new WorkflowConfig.Builder(jobQueueName);
  workflowCfgBuilder.setExpiry(120000);
  if (targetState != null) {
    // BUG FIX: honor the caller-supplied state. The original hard-coded
    // TargetState.STOP here, silently stopping any queue built with a non-null,
    // non-STOP target state.
    workflowCfgBuilder.setTargetState(targetState);
  }

  // Spread the delay across the minute/second fields; a lenient Calendar rolls
  // any overflow into the larger fields.
  Calendar cal = Calendar.getInstance();
  cal.set(Calendar.MINUTE, cal.get(Calendar.MINUTE) + delayStart / 60);
  cal.set(Calendar.SECOND, cal.get(Calendar.SECOND) + delayStart % 60);
  cal.set(Calendar.MILLISECOND, 0);
  ScheduleConfig scheduleConfig =
      ScheduleConfig.recurringFromDate(cal.getTime(), TimeUnit.SECONDS, recurrenceInSeconds);
  workflowCfgBuilder.setScheduleConfig(scheduleConfig);
  return new JobQueue.Builder(jobQueueName).setWorkflowConfig(workflowCfgBuilder.build());
}
 
Example #16
Source File: TaskTestUtil.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Builds a one-shot job queue scheduled to start {@code delayStart} seconds from now.
 *
 * @param jobQueueName     queue (workflow) name
 * @param delayStart       delay in seconds before the queue starts
 * @param failureThreshold failure threshold to set on the workflow; skipped when <= 0
 * @param capacity         maximum number of jobs the queue may hold
 * @return a JobQueue.Builder carrying the assembled WorkflowConfig
 */
public static JobQueue.Builder buildJobQueue(String jobQueueName, int delayStart,
    int failureThreshold, int capacity) {
  WorkflowConfig.Builder workflowCfgBuilder = new WorkflowConfig.Builder(jobQueueName);
  workflowCfgBuilder.setExpiry(120000);
  workflowCfgBuilder.setCapacity(capacity);

  // Spread the delay across minute/second fields; a lenient Calendar rolls any
  // overflow (e.g. seconds > 59) into the larger fields.
  Calendar cal = Calendar.getInstance();
  cal.set(Calendar.MINUTE, cal.get(Calendar.MINUTE) + delayStart / 60);
  cal.set(Calendar.SECOND, cal.get(Calendar.SECOND) + delayStart % 60);
  cal.set(Calendar.MILLISECOND, 0);
  workflowCfgBuilder.setScheduleConfig(ScheduleConfig.oneTimeDelayedStart(cal.getTime()));

  // Only apply a positive threshold; 0/negative means "leave the default"
  if (failureThreshold > 0) {
    workflowCfgBuilder.setFailureThreshold(failureThreshold);
  }
  return new JobQueue.Builder(jobQueueName).setWorkflowConfig(workflowCfgBuilder.build());
}
 
Example #17
Source File: TestScheduleDelayTask.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test
public void testJobQueueDelay() throws InterruptedException {
  String workflowName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuild = TaskTestUtil.buildJobQueue(workflowName);

  JobConfig.Builder jobBuilder =
      new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
          .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2)
          .setJobCommandConfigMap(WorkflowGenerator.DEFAULT_COMMAND_CONFIG);

  // Jobs 1-3 run back to back; Job4 carries a 2s execution delay
  for (int jobNum = 1; jobNum < 4; jobNum++) {
    queueBuild.enqueueJob("Job" + jobNum, jobBuilder);
  }
  queueBuild.enqueueJob("Job4", jobBuilder.setExecutionDelay(2000L));

  _driver.start(queueBuild.build());
  _driver.pollForJobState(workflowName, TaskUtil.getNamespacedJobName(workflowName, "Job4"),
      TaskState.COMPLETED);

  // Job4 must not start until at least 2s after Job3 finished
  long job3FinishTime =
      _driver.getJobContext(TaskUtil.getNamespacedJobName(workflowName, "Job3")).getFinishTime();
  long job4StartTime = _driver.getWorkflowContext(workflowName)
      .getJobStartTime(TaskUtil.getNamespacedJobName(workflowName, "Job4"));
  Assert.assertTrue(job4StartTime - job3FinishTime >= 2000L);
}
 
Example #18
Source File: TestZkConnectionLost.java    From helix with Apache License 2.0 5 votes vote down vote up
// Negative counterpart of testLostZkConnection: with a very short ZK connect-wait
// timeout, a ZK server restart should prevent the scheduled workflow from
// completing. Currently disabled (enabled = false).
@Test(dependsOnMethods = {
    "testLostZkConnection"
}, enabled = false)
public void testLostZkConnectionNegative() throws Exception {
  // Shrink ZK timeouts so the client gives up quickly after the restart
  System.setProperty(SystemPropertyKeys.ZK_WAIT_CONNECTED_TIMEOUT, "10");
  System.setProperty(SystemPropertyKeys.ZK_SESSION_TIMEOUT, "1000");

  try {
    String queueName = TestHelper.getTestMethodName();

    // Restart participants so they pick up the shortened timeouts
    stopParticipants();
    startParticipants(_zkAddr);

    LOG.info("Starting job-queue: " + queueName);
    JobQueue.Builder queueBuild = TaskTestUtil.buildRecurrentJobQueue(queueName, 0, 6000);
    createAndEnqueueJob(queueBuild, 3);

    _driver.start(queueBuild.build());

    // Bounce the ZK server while the queue is running
    restartZkServer();

    WorkflowContext wCtx = TaskTestUtil.pollForWorkflowContext(_driver, queueName);
    // ensure job 1 is started before stop it
    String scheduledQueue = wCtx.getLastScheduledSingleWorkflow();

    try {
      _driver.pollForWorkflowState(scheduledQueue, 30000, TaskState.COMPLETED);
      Assert.fail("Test failure!");
    } catch (HelixException ex) {
      // test succeeded: the workflow did not complete within the poll window
    }
  } finally {
    // Always restore the global ZK properties for subsequent tests
    System.clearProperty(SystemPropertyKeys.ZK_WAIT_CONNECTED_TIMEOUT);
    System.clearProperty(SystemPropertyKeys.ZK_SESSION_TIMEOUT);
  }
}
 
Example #19
Source File: JobQueuesResource.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Add a new job queue
 * <p>
 * Usage:
 * <code>curl -d @'{jobQueueConfig.yaml}'
 * -H 'Content-Type: application/json' http://{host:port}/clusters/{clusterName}/jobQueues
 * <p>
 * For jobQueueConfig.yaml, see {@link Workflow#parse(String)}
 */
@Override
public Representation post(Representation entity) {
  try {
    // Resolve the target cluster and the shared ZkClient from the request/context
    String clusterName =
        ResourceUtil.getAttributeFromRequest(getRequest(), ResourceUtil.RequestKey.CLUSTER_NAME);
    ZkClient zkClient =
        ResourceUtil.getAttributeFromCtx(getContext(), ResourceUtil.ContextKey.ZKCLIENT);

    Form form = new Form(entity);
    // Get the job queue and submit it
    if (form.size() < 1) {
      throw new HelixException("Yaml job queue config is required!");
    }
    // The YAML payload arrives as the name of the first form parameter
    Parameter payload = form.get(0);
    String yamlPayload = payload.getName();
    if (yamlPayload == null) {
      throw new HelixException("Yaml job queue config is required!");
    }

    // Parse the YAML into a workflow, copy its config into a queue builder, and create it
    Workflow workflow = Workflow.parse(yamlPayload);
    JobQueue.Builder jobQueueCfgBuilder = new JobQueue.Builder(workflow.getName());
    jobQueueCfgBuilder.fromMap(workflow.getWorkflowConfig().getResourceConfigMap());
    TaskDriver driver = new TaskDriver(zkClient, clusterName);
    driver.createQueue(jobQueueCfgBuilder.build());

    getResponse().setEntity(getHostedEntitiesRepresentation(clusterName));
    getResponse().setStatus(Status.SUCCESS_OK);
  } catch (Exception e) {
    // Error details are returned as a JSON body
    getResponse().setEntity(ClusterRepresentationUtil.getErrorAsJsonStringFromException(e),
        MediaType.APPLICATION_JSON);
    // NOTE(review): the error path also returns HTTP 200 (SUCCESS_OK); clients must
    // inspect the body to detect failure — confirm whether this is intentional before
    // changing it to an error status.
    getResponse().setStatus(Status.SUCCESS_OK);
    LOG.error("Exception in posting job queue: " + entity, e);
  }
  return null;
}
 
Example #20
Source File: TestWorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test(dependsOnMethods = "testGetWorkflowContext")
public void testCreateWorkflow() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver driver = getTaskDriver(CLUSTER_NAME);

  // Create one time workflow
  Entity entity = Entity.entity(WORKFLOW_INPUT, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_WORKFLOW_NAME, null, entity,
      Response.Status.OK.getStatusCode());

  // The stored workflow must exist and contain both of its jobs
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_WORKFLOW_NAME);
  Assert.assertNotNull(workflowConfig);
  Assert.assertEquals(workflowConfig.getJobDag().getAllNodes().size(), 2);

  // Create JobQueue
  JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME)
      .setWorkflowConfig(driver.getWorkflowConfig(TEST_WORKFLOW_NAME));
  String queueConfigJson = OBJECT_MAPPER.writeValueAsString(Collections
      .singletonMap(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
          jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields()));
  entity = Entity.entity(queueConfigJson, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity,
      Response.Status.OK.getStatusCode());

  // The stored config must be recognized as an (initially empty) job queue
  workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  Assert.assertNotNull(workflowConfig);
  Assert.assertTrue(workflowConfig.isJobQueue());
  Assert.assertEquals(workflowConfig.getJobDag().getAllNodes().size(), 0);
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #21
Source File: TestJobAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test(dependsOnMethods = "testGetJobContext")
public void testCreateJob() throws IOException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());

  TaskDriver driver = getTaskDriver(CLUSTER_NAME);

  // Create JobQueue
  JobQueue.Builder jobQueue = new JobQueue.Builder(TEST_QUEUE_NAME)
      .setWorkflowConfig(driver.getWorkflowConfig(WORKFLOW_NAME));
  String queueConfigJson = OBJECT_MAPPER.writeValueAsString(Collections
      .singletonMap(WorkflowAccessor.WorkflowProperties.WorkflowConfig.name(),
          jobQueue.build().getWorkflowConfig().getRecord().getSimpleFields()));
  Entity entity = Entity.entity(queueConfigJson, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME, null, entity,
      Response.Status.OK.getStatusCode());

  // Test enqueue job
  entity = Entity.entity(JOB_INPUT, MediaType.APPLICATION_JSON_TYPE);
  put("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      null, entity, Response.Status.OK.getStatusCode());

  // The job config must be persisted...
  String jobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  JobConfig jobConfig = driver.getJobConfig(jobName);
  Assert.assertNotNull(jobConfig);

  // ...and the job must appear in the queue's DAG
  WorkflowConfig workflowConfig = driver.getWorkflowConfig(TEST_QUEUE_NAME);
  Assert.assertTrue(workflowConfig.getJobDag().getAllNodes().contains(jobName));
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #22
Source File: TestUserContentStore.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test
public void testJobContentPutAndGetWithDependency() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 100);

  // Two single-task jobs, each wrapping one named TaskConfig
  Map<String, String> taskConfigMap1 = Maps.newHashMap();
  Map<String, String> taskConfigMap2 = Maps.newHashMap();
  List<TaskConfig> taskConfigs1 = Lists.newArrayListWithCapacity(1);
  List<TaskConfig> taskConfigs2 = Lists.newArrayListWithCapacity(1);
  taskConfigs1.add(new TaskConfig("TaskOne", taskConfigMap1));
  taskConfigs2.add(new TaskConfig("TaskTwo", taskConfigMap2));

  Map<String, String> jobCommandMap = Maps.newHashMap();
  jobCommandMap.put("Timeout", "1000");

  JobConfig.Builder jobBuilder1 = new JobConfig.Builder()
      .setCommand("DummyCommand")
      .addTaskConfigs(taskConfigs1)
      .setJobCommandConfigMap(jobCommandMap)
      .setWorkflow(queueName);
  JobConfig.Builder jobBuilder2 = new JobConfig.Builder()
      .setCommand("DummyCommand")
      .addTaskConfigs(taskConfigs2)
      .setJobCommandConfigMap(jobCommandMap)
      .setWorkflow(queueName);

  // Second job is queued behind the first
  queueBuilder.enqueueJob(queueName + 0, jobBuilder1);
  queueBuilder.enqueueJob(queueName + 1, jobBuilder2);

  _driver.start(queueBuilder.build());
  _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, queueName + 1),
      TaskState.COMPLETED);
  // The dependent (second) job must end up COMPLETED in the workflow context
  Assert.assertEquals(_driver.getWorkflowContext(queueName)
      .getJobState(TaskUtil.getNamespacedJobName(queueName, queueName + 1)), TaskState.COMPLETED);
}
 
Example #23
Source File: TestGenericJobs.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test public void testGenericJobs() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);

  // Enqueue 4 generic jobs of 10 tasks each
  int num_jobs = 4;
  int num_tasks = 10;
  List<String> currentJobNames = new ArrayList<>();
  for (int jobIdx = 0; jobIdx < num_jobs; jobIdx++) {
    List<TaskConfig> taskConfigs = new ArrayList<>();
    for (int taskIdx = 0; taskIdx < num_tasks; taskIdx++) {
      taskConfigs.add(new TaskConfig.Builder()
          .setTaskId("task_" + taskIdx)
          .setCommand(MockTask.TASK_COMMAND)
          .build());
    }

    JobConfig.Builder jobConfig = new JobConfig.Builder();
    jobConfig.addTaskConfigs(taskConfigs);

    String jobName = "job_" + jobIdx;
    queueBuilder.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }

  _driver.start(queueBuilder.build());

  // Wait for the final job in the chain — its completion implies the rest ran
  String namedSpaceJob =
      String.format("%s_%s", queueName, currentJobNames.get(currentJobNames.size() - 1));
  _driver.pollForJobState(queueName, namedSpaceJob, TaskState.COMPLETED);
}
 
Example #24
Source File: TestTaskThreadLeak.java    From helix with Apache License 2.0 5 votes vote down vote up
// Runs 5 jobs (20 partitions each, high per-instance concurrency) through one queue
// and then asserts the TaskStateModelFactory thread pool did not leak threads:
// the post-run count may exceed the baseline by at most the pool size + 1.
@Test
public void testTaskThreadCount() throws InterruptedException {
  String queueName = "myTestJobQueue";
  JobQueue.Builder queueBuilder = new JobQueue.Builder(queueName);
  String lastJob = null;
  for (int i = 0; i < 5; i++) {
    // One dedicated FULL_AUTO MasterSlave resource per job
    String db = TestHelper.getTestMethodName() + "_" + i;
    _gSetupTool.addResourceToCluster(CLUSTER_NAME, db, 20, MASTER_SLAVE_STATE_MODEL,
        IdealState.RebalanceMode.FULL_AUTO.name());
    _gSetupTool.rebalanceStorageCluster(CLUSTER_NAME, db, 1);
    JobConfig.Builder jobBuilder =
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(db)
            .setNumConcurrentTasksPerInstance(100);
    queueBuilder.addJob(db + "_job", jobBuilder);
    lastJob = db + "_job";
  }

  // Let up to 10 jobs run in parallel to stress the task thread pool
  queueBuilder
      .setWorkflowConfig(new WorkflowConfig.Builder(queueName).setParallelJobs(10).build());

  _driver.start(queueBuilder.build());

  // Completion of the last-added job implies the whole queue drained
  String nameSpacedJob = TaskUtil.getNamespacedJobName(queueName, lastJob);
  _driver.pollForJobState(queueName, nameSpacedJob, TaskState.COMPLETED);


  int threadCountAfter = getThreadCount("TaskStateModelFactory");

  // Allow pool size + 1 growth over the pre-test baseline (_threadCountBefore)
  Assert.assertTrue(
      (threadCountAfter - _threadCountBefore) <= TaskStateModelFactory.TASK_THREADPOOL_SIZE + 1);
}
 
Example #25
Source File: TestStopWorkflow.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Verifies that a job queue whose jobs have all terminated (one COMPLETED, one FAILED)
 * can be stopped: the workflow state must move from IN_PROGRESS to STOPPED.
 */
@Test
public void testStopWorkflow() throws InterruptedException {
  stopTestSetup(5);

  String jobQueueName = TestHelper.getTestMethodName();
  JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG)
      .setMaxAttemptsPerTask(1).setWorkflow(jobQueueName)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.SUCCESS_COUNT_BEFORE_FAIL, "1"));

  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("job1_will_succeed", jobBuilder);
  jobQueue.enqueueJob("job2_will_fail", jobBuilder);
  _driver.start(jobQueue.build());

  // job1 should succeed and job2 should fail, wait until that happens
  _driver.pollForJobState(jobQueueName,
      TaskUtil.getNamespacedJobName(jobQueueName, "job2_will_fail"), TaskState.FAILED);

  // FIX: TestNG's Assert.assertEquals takes (actual, expected) — the original call
  // passed them reversed, which garbles the failure message. Matches the argument
  // order used by the other assertions in this suite.
  Assert.assertEquals(_driver.getWorkflowContext(jobQueueName).getWorkflowState(),
      TaskState.IN_PROGRESS);

  // Now stop the workflow, and it should be stopped because all jobs have completed or failed.
  _driver.waitToStop(jobQueueName, 4000);
  _driver.pollForWorkflowState(jobQueueName, TaskState.STOPPED);

  Assert.assertEquals(_driver.getWorkflowContext(jobQueueName).getWorkflowState(),
      TaskState.STOPPED);

  cleanupParticipants(5);
}
 
Example #26
Source File: TestTaskRebalancerParallel.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * This test starts 4 jobs in job queue, the job all stuck, and verify that
 * (1) the number of running job does not exceed configured max allowed parallel jobs
 * (2) one instance can only be assigned to one job in the workflow
 */
@Test
public void testWhenDisallowOverlapJobAssignment() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  WorkflowConfig.Builder cfgBuilder = new WorkflowConfig.Builder(queueName);
  cfgBuilder.setParallelJobs(PARALLEL_COUNT);
  cfgBuilder.setAllowOverlapJobAssignment(false);

  JobQueue.Builder queueBuild =
      new JobQueue.Builder(queueName).setWorkflowConfig(cfgBuilder.build());
  JobQueue queue = queueBuild.build();
  _driver.createQueue(queue);

  List<JobConfig.Builder> jobConfigBuilders = new ArrayList<JobConfig.Builder>();
  for (String testDbName : _testDbs) {
    jobConfigBuilders.add(
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(testDbName)
            .setTargetPartitionStates(Collections.singleton("SLAVE"))
            .setJobCommandConfigMap(Collections.singletonMap(MockTask.JOB_DELAY, "1000")));
  }

  _driver.stop(queueName);
  for (int i = 0; i < jobConfigBuilders.size(); ++i) {
    _driver.enqueueJob(queueName, "job_" + (i + 1), jobConfigBuilders.get(i));
  }
  _driver.resume(queueName);
  Thread.sleep(1000L);
  Assert.assertTrue(TaskTestUtil.pollForWorkflowParallelState(_driver, queueName));
}
 
Example #27
Source File: TestDeleteWorkflow.java    From helix with Apache License 2.0 5 votes vote down vote up
// Verifies deleteAndWaitForCompletion(): with the controller paused the delete must
// time out (the controller performs the actual cleanup), and after re-enabling the
// controller the same call must fully remove the queue's config, context and ideal state.
@Test
public void testDeleteWorkflow() throws InterruptedException {
  String jobQueueName = TestHelper.getTestMethodName();
  // A very long-running job (100s delay) keeps the queue alive during the test
  JobConfig.Builder jobBuilder = JobConfig.Builder.fromMap(WorkflowGenerator.DEFAULT_JOB_CONFIG)
      .setMaxAttemptsPerTask(1).setWorkflow(jobQueueName)
      .setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "100000"));

  JobQueue.Builder jobQueue = TaskTestUtil.buildJobQueue(jobQueueName);
  jobQueue.enqueueJob("job1", jobBuilder);
  _driver.start(jobQueue.build());
  _driver.pollForJobState(jobQueueName, TaskUtil.getNamespacedJobName(jobQueueName, "job1"),
      TaskState.IN_PROGRESS);

  // Check that WorkflowConfig, WorkflowContext, and IdealState are indeed created for this job
  // queue
  Assert.assertNotNull(_driver.getWorkflowConfig(jobQueueName));
  Assert.assertNotNull(_driver.getWorkflowContext(jobQueueName));
  Assert.assertNotNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));

  // Pause the Controller so that the job queue won't get deleted
  admin.enableCluster(CLUSTER_NAME, false);
  Thread.sleep(1000);
  // Attempt the deletion and time out
  try {
    _driver.deleteAndWaitForCompletion(jobQueueName, DELETE_DELAY);
    Assert.fail(
        "Delete must time out and throw a HelixException with the Controller paused, but did not!");
  } catch (HelixException e) {
    // Pass
  }

  // Resume the Controller and call delete again
  admin.enableCluster(CLUSTER_NAME, true);
  _driver.deleteAndWaitForCompletion(jobQueueName, DELETE_DELAY);

  // Check that the deletion operation completed
  Assert.assertNull(_driver.getWorkflowConfig(jobQueueName));
  Assert.assertNull(_driver.getWorkflowContext(jobQueueName));
  Assert.assertNull(admin.getResourceIdealState(CLUSTER_NAME, jobQueueName));
}
 
Example #28
Source File: TestTaskAssignment.java    From helix with Apache License 2.0
/**
 * Enqueues a generic (non-targeted) job whose tasks are pinned to the instance
 * group tag "TESTTAG1" and verifies the tasks are assigned to the expected
 * tagged participant.
 */
@Test
public void testGenericTaskInstanceGroup() throws InterruptedException {
  String queueName = TestHelper.getTestMethodName();
  String jobName = "Job4InstanceGroup";
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName);

  // Generic job with a few tasks, restricted to instances carrying TESTTAG1.
  JobConfig.Builder jobCfg = new JobConfig.Builder();
  List<TaskConfig> tasks = new ArrayList<>();
  final int taskCount = 3;
  for (int id = 0; id < taskCount; id++) {
    tasks.add(new TaskConfig.Builder()
        .setTaskId("task_" + id)
        .setCommand(MockTask.TASK_COMMAND)
        .build());
  }
  jobCfg.addTaskConfigs(tasks);
  jobCfg.setInstanceGroupTag("TESTTAG1");

  queueBuilder.enqueueJob(jobName, jobCfg);
  _driver.start(queueBuilder.build());

  // Give the rebalancer a moment to perform the assignment.
  Thread.sleep(1000L);

  // NOTE(review): tasks tagged TESTTAG1 are expected to land on
  // _participants[1]; the tag assignment presumably happens in the test
  // class setup — confirm against the @BeforeClass of this test file.
  String namespacedJob = TaskUtil.getNamespacedJobName(queueName, jobName);
  Assert.assertEquals(_driver.getJobContext(namespacedJob).getAssignedParticipant(0),
      _participants[1].getInstanceName());
}
 
Example #29
Source File: TestJobFailureDependence.java    From helix with Apache License 2.0
/**
 * Enqueues one targeted job per test DB, drops the resource backing the third
 * job, and verifies that the failed job and every job after it in the queue
 * transition to FAILED (dependent-job failure is NOT ignored here).
 */
@Test
public void testJobDependantFailure() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 100);

  // Create and enqueue one job per test DB.
  List<String> currentJobNames = new ArrayList<>();
  for (int i = 0; i < _numDbs; i++) {
    JobConfig.Builder jobConfig =
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(_testDbs.get(i))
            .setTargetPartitionStates(Sets.newHashSet("SLAVE"));
    String jobName = "job" + _testDbs.get(i);
    queueBuilder.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }

  _driver.start(queueBuilder.build());
  // Drop the resource targeted by the third job so that job fails.
  _gSetupTool.dropResourceFromCluster(CLUSTER_NAME, _testDbs.get(2));

  // The failed job and all jobs queued after it should fail too.
  for (int i = 2; i < _numDbs; i++) {
    // Use TaskUtil for namespacing, consistent with the rest of this file,
    // instead of hand-rolling the "<queue>_<job>" format.
    String namespacedJob = TaskUtil.getNamespacedJobName(queueName, currentJobNames.get(i));
    _driver.pollForJobState(queueName, namespacedJob, TaskState.FAILED);
  }
}
 
Example #30
Source File: TestJobFailureDependence.java    From helix with Apache License 2.0
/**
 * Enqueues one targeted job per test DB with {@code ignoreDependentJobFailure}
 * enabled, drops the resource backing the third job, and verifies that only
 * that job FAILs while every subsequent job still COMPLETEs.
 */
@Test
public void testIgnoreJobDependantFailure() throws Exception {
  String queueName = TestHelper.getTestMethodName();

  // Create a queue
  LOG.info("Starting job-queue: " + queueName);
  JobQueue.Builder queueBuilder = TaskTestUtil.buildJobQueue(queueName, 0, 100);

  // Create and enqueue one job per test DB, each ignoring upstream failures.
  List<String> currentJobNames = new ArrayList<>();
  for (int i = 0; i < _numDbs; i++) {
    JobConfig.Builder jobConfig =
        new JobConfig.Builder().setCommand(MockTask.TASK_COMMAND).setTargetResource(_testDbs.get(i))
            .setTargetPartitionStates(Sets.newHashSet("SLAVE")).setIgnoreDependentJobFailure(true);
    String jobName = "job" + _testDbs.get(i);
    queueBuilder.enqueueJob(jobName, jobConfig);
    currentJobNames.add(jobName);
  }

  _driver.start(queueBuilder.build());
  // Drop the resource targeted by the third job so that job fails.
  _gSetupTool.dropResourceFromCluster(CLUSTER_NAME, _testDbs.get(2));
  // Use TaskUtil for namespacing, consistent with the rest of this file,
  // instead of hand-rolling the "<queue>_<job>" format.
  String namespacedJob2 = TaskUtil.getNamespacedJobName(queueName, currentJobNames.get(2));
  _driver.pollForJobState(queueName, namespacedJob2, TaskState.FAILED);

  // All jobs after the failed one should still complete.
  for (int i = 3; i < _numDbs; i++) {
    String namespacedJob = TaskUtil.getNamespacedJobName(queueName, currentJobNames.get(i));
    _driver.pollForJobState(queueName, namespacedJob, TaskState.COMPLETED);
  }
}