org.apache.helix.task.TaskDriver Java Examples

The following examples show how to use org.apache.helix.task.TaskDriver. You can vote up the examples you find useful or vote down the ones you don't, and follow the links above each example to view the original project or source file. You may also check out the related API usage on the sidebar.
Example #1
Source File: JobAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
@DELETE
@Path("{jobName}")
public Response deleteJob(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName,
    @QueryParam("force") @DefaultValue("false") String forceDelete) {
  // Interpret the optional "force" query parameter; defaults to a non-forced delete.
  boolean forceRequested = Boolean.parseBoolean(forceDelete);
  TaskDriver taskDriver = getTaskDriver(clusterId);

  try {
    // Delegate the actual removal to the Helix task framework.
    taskDriver.deleteJob(workflowName, jobName, forceRequested);
  } catch (Exception e) {
    // Surface any failure (unknown workflow/job, ZK errors, ...) as a 400 response.
    return badRequest(e.getMessage());
  }

  return OK();
}
 
Example #2
Source File: HelixUtils.java    From incubator-gobblin with Apache License 2.0 6 votes vote down vote up
/**
 * Blocks until the Helix workflow context exists and the given job has a recorded state,
 * i.e. the job has actually been initialized by the Helix controller.
 *
 * @param helixManager connected manager used to read the workflow context
 * @param workFlowName name of the workflow containing the job
 * @param jobName un-namespaced job name
 * @param timeoutMillis maximum time to wait before giving up
 * @throws JobException if the job is not initialized within {@code timeoutMillis}
 * @throws Exception on interruption while sleeping between polls
 */
static void waitJobInitialization(
    HelixManager helixManager,
    String workFlowName,
    String jobName,
    long timeoutMillis) throws Exception {
  WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);

  // If the helix job is deleted from some other thread or a completely external process,
  // method waitJobCompletion() needs to differentiate between the cases where
  // 1) workflowContext did not get initialized ever, in which case we need to keep waiting, or
  // 2) it did get initialized but deleted soon after, in which case we should stop waiting
  // To overcome this issue, we wait here till workflowContext gets initialized
  long start = System.currentTimeMillis();
  while (workflowContext == null || workflowContext.getJobState(TaskUtil.getNamespacedJobName(workFlowName, jobName)) == null) {
    if (System.currentTimeMillis() - start > timeoutMillis) {
      log.error("Job cannot be initialized within {} milliseconds, considered as an error", timeoutMillis);
      // Fix: the original passed an SLF4J-style "{}" placeholder straight into the
      // exception message without substitution; format the value explicitly instead.
      throw new JobException(String.format(
          "Job cannot be initialized within %d milliseconds, considered as an error", timeoutMillis));
    }
    workflowContext = TaskDriver.getWorkflowContext(helixManager, workFlowName);
    Thread.sleep(1000);
    log.info("Waiting for work flow initialization.");
  }

  log.info("Work flow {} initialized", workFlowName);
}
 
Example #3
Source File: HelixUtils.java    From incubator-gobblin with Apache License 2.0 6 votes vote down vote up
/**
 * Wraps a single job into a dedicated Helix workflow (named after the queue), starts it,
 * and blocks until Helix has initialized the job's state.
 */
public static void submitJobToWorkFlow(JobConfig.Builder jobConfigBuilder,
    String workFlowName,
    String jobName,
    TaskDriver helixTaskDriver,
    HelixManager helixManager,
    long workFlowExpiryTime) throws Exception {

  // One workflow per job; expire it after the configured number of seconds.
  WorkflowConfig expiringConfig =
      new WorkflowConfig.Builder().setExpiry(workFlowExpiryTime, TimeUnit.SECONDS).build();
  // Create a work flow for each job with the name being the queue name
  Workflow singleJobWorkFlow = new Workflow.Builder(workFlowName)
      .setWorkflowConfig(expiringConfig)
      .addJob(jobName, jobConfigBuilder)
      .build();

  // Kick off the workflow and wait (without bound) for the job to be initialized.
  helixTaskDriver.start(singleJobWorkFlow);
  log.info("Created a work flow {}", workFlowName);

  waitJobInitialization(helixManager, workFlowName, jobName, Long.MAX_VALUE);
}
 
Example #4
Source File: JobAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
@GET
@Path("{jobName}")
public Response getJob(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  Map<String, ZNRecord> result = new HashMap<>();

  // A job without a stored config is reported as a bad request.
  JobConfig config = taskDriver.getJobConfig(jobName);
  if (config == null) {
    return badRequest(String.format("Job config for %s does not exists", jobName));
  }
  result.put(JobProperties.JobConfig.name(), config.getRecord());

  // The context may legitimately be missing (job not started yet); the key is then
  // present with a null value, matching the original response shape.
  JobContext context = taskDriver.getJobContext(jobName);
  result.put(JobProperties.JobContext.name(), context == null ? null : context.getRecord());

  return JSONRepresentation(result);
}
 
Example #5
Source File: TestWorkflowAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test(dependsOnMethods = "testInvalidGetAndUpdateWorkflowContentStore")
public void testDeleteWorkflow() throws InterruptedException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver taskDriver = getTaskDriver(CLUSTER_NAME);

  // Snapshot the workflow count so we can verify that exactly two were removed.
  int workflowCountBefore = taskDriver.getWorkflows().size();

  String workflowsBasePath = "clusters/" + CLUSTER_NAME + "/workflows/";
  delete(workflowsBasePath + TEST_WORKFLOW_NAME, Response.Status.OK.getStatusCode());
  delete(workflowsBasePath + TEST_QUEUE_NAME, Response.Status.OK.getStatusCode());

  // Give the controller a moment to process the deletions.
  Thread.sleep(500);
  Assert.assertEquals(taskDriver.getWorkflows().size(), workflowCountBefore - 2);
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #6
Source File: TestBatchAddJobs.java    From helix with Apache License 2.0 6 votes vote down vote up
/**
 * Starts ten concurrent job-submitting threads against one queue and verifies that, once
 * 100 jobs are enqueued, each submitter's jobs stay contiguous in the resulting job DAG.
 */
@Test
public void testBatchAddJobs() throws Exception {
  TaskDriver driver = new TaskDriver(_gZkClient, CLUSTER_NAME);
  driver.createQueue(new JobQueue.Builder(QUEUE_NAME).build());
  for (int i = 0; i < 10; i++) {
    _submitJobTasks.add(new SubmitJobTask(ZK_ADDR, i));
    _submitJobTasks.get(i).start();
  }

  WorkflowConfig workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  while (workflowConfig.getJobDag().getAllNodes().size() < 100) {
    Thread.sleep(50);
    // Fix: the original discarded this result, so the loop condition re-read the same
    // stale config and could spin forever; assign the fresh config instead.
    workflowConfig = driver.getWorkflowConfig(QUEUE_NAME);
  }

  // Walk the DAG chain from its head. Whenever the submitter prefix changes between a
  // job and its child, the previous job must end in '9' (that submitter's final job),
  // i.e. a submitter's batch was not interleaved with another's.
  JobDag dag = workflowConfig.getJobDag();
  String currentJob = dag.getAllNodes().iterator().next();
  while (dag.getDirectChildren(currentJob).size() > 0) {
    String childJob = dag.getDirectChildren(currentJob).iterator().next();
    if (!getPrefix(currentJob).equals(getPrefix(childJob))
        && currentJob.charAt(currentJob.length() - 1) != '9') {
      Assert.fail();
    }
    currentJob = childJob;
  }
}
 
Example #7
Source File: TestWorkflowAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test(dependsOnMethods = "testCreateWorkflow")
public void testUpdateWorkflow() {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver taskDriver = getTaskDriver(CLUSTER_NAME);

  String queueUrl = "clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME;
  Entity emptyJsonEntity = Entity.entity("", MediaType.APPLICATION_JSON_TYPE);

  // Stop the queue and verify the target state was flipped to STOP.
  post(queueUrl, ImmutableMap.of("command", "stop"), emptyJsonEntity,
      Response.Status.OK.getStatusCode());
  Assert.assertEquals(taskDriver.getWorkflowConfig(TEST_QUEUE_NAME).getTargetState(),
      TargetState.STOP);

  // Resume it and verify the target state went back to START.
  post(queueUrl, ImmutableMap.of("command", "resume"), emptyJsonEntity,
      Response.Status.OK.getStatusCode());
  Assert.assertEquals(taskDriver.getWorkflowConfig(TEST_QUEUE_NAME).getTargetState(),
      TargetState.START);
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #8
Source File: HelixUtils.java    From incubator-gobblin with Apache License 2.0 6 votes vote down vote up
/**
 * Returns true when the given job has reached a terminal state, or when its workflow
 * context no longer exists (which is treated as finished).
 *
 * @param workflowName workflow the job belongs to
 * @param jobName un-namespaced job name
 * @param helixManager connected manager used to read the workflow context
 * @return true if the job is STOPPED/FAILED/COMPLETED/ABORTED/TIMED_OUT or the workflow
 *         context is gone; false otherwise
 */
static boolean isJobFinished(String workflowName, String jobName, HelixManager helixManager) {
  WorkflowContext workflowContext = TaskDriver.getWorkflowContext(helixManager, workflowName);
  if (workflowContext == null) {
    // this workflow context doesn't exist, considered as finished.
    return true;
  }

  TaskState jobState = workflowContext.getJobState(TaskUtil.getNamespacedJobName(workflowName, jobName));
  // Fix: getJobState() can return null for a job with no recorded state yet (see the
  // null check in waitJobInitialization); the original switch would throw an NPE here.
  // A job without any state is not finished.
  if (jobState == null) {
    return false;
  }
  switch (jobState) {
    case STOPPED:
    case FAILED:
    case COMPLETED:
    case ABORTED:
    case TIMED_OUT:
      return true;
    default:
      return false;
  }
}
 
Example #9
Source File: JobAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
@GET
public Response getJobs(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName) {
  TaskDriver taskDriver = getTaskDriver(clusterId);

  // An unknown workflow is reported as a bad request.
  WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowName);
  if (workflowConfig == null) {
    return badRequest(String.format("Workflow %s is not found!", workflowName));
  }

  // Response shape: { "id": "Jobs", "Jobs": [ ...job names... ] }
  ObjectNode root = JsonNodeFactory.instance.objectNode();
  root.put(Properties.id.name(), JobProperties.Jobs.name());
  ArrayNode jobListNode = root.putArray(JobProperties.Jobs.name());

  Set<String> jobNames = workflowConfig.getJobDag().getAllNodes();
  if (jobNames != null) {
    jobListNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(jobNames));
  }
  return JSONRepresentation(root);
}
 
Example #10
Source File: TestJobAccessor.java    From helix with Apache License 2.0 6 votes vote down vote up
@Test(dependsOnMethods = "testInvalidGetAndUpdateJobContentStore")
public void testDeleteJob() throws InterruptedException {
  System.out.println("Start test :" + TestHelper.getTestMethodName());
  TaskDriver taskDriver = getTaskDriver(CLUSTER_NAME);

  // The queue must be fully stopped before one of its jobs can be deleted.
  taskDriver.waitToStop(TEST_QUEUE_NAME, 5000);
  delete("clusters/" + CLUSTER_NAME + "/workflows/" + TEST_QUEUE_NAME + "/jobs/" + TEST_JOB_NAME,
      Response.Status.OK.getStatusCode());

  // The job config should be gone, and the job should no longer appear in the queue's DAG.
  String namespacedJobName = TaskUtil.getNamespacedJobName(TEST_QUEUE_NAME, TEST_JOB_NAME);
  Assert.assertNull(taskDriver.getJobConfig(namespacedJobName));
  Assert.assertFalse(taskDriver.getWorkflowConfig(TEST_QUEUE_NAME).getJobDag().getAllNodes()
      .contains(namespacedJobName));
  System.out.println("End test :" + TestHelper.getTestMethodName());
}
 
Example #11
Source File: TestStopAndResumeQueue.java    From helix with Apache License 2.0 5 votes vote down vote up
// Sets up a small cluster (1 partition, 3 nodes), connects an administrator-type
// HelixManager, and creates the TaskDriver shared by the tests in this class.
@BeforeClass
public void beforeClass() throws Exception {
  // These sizes must be assigned before super.beforeClass() provisions the cluster.
  _numPartitions = 1;
  _numNodes = 3;
  super.beforeClass();
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);
  _manager.connect();
  // TaskDriver requires an already-connected manager.
  _driver = new TaskDriver(_manager);
}
 
Example #12
Source File: TestTaskRebalancer.java    From helix with Apache License 2.0 5 votes vote down vote up
@Test
public void testRepeatedWorkflow() throws Exception {
  String workflowName = "SomeWorkflow";
  // Build and launch a workflow whose jobs repeat the default job template.
  Workflow repeatedFlow =
      WorkflowGenerator.generateDefaultRepeatedJobWorkflowBuilder(workflowName).build();
  new TaskDriver(_manager).start(repeatedFlow);

  // Block until the whole workflow reports completion...
  _driver.pollForWorkflowState(workflowName, TaskState.COMPLETED);

  // ...then confirm that every individual job completed as well.
  for (String jobName : repeatedFlow.getJobConfigs().keySet()) {
    _driver.pollForJobState(workflowName, jobName, TaskState.COMPLETED);
  }
}
 
Example #13
Source File: TestTaskSchedulingTwoCurrentStates.java    From helix with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public void beforeClass() throws Exception {
  _numPartitions = 1;
  _numNodes = 3;
  super.beforeClass();
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);

  // Stop participants that have been started in super class
  for (int i = 0; i < _numNodes; i++) {
    super.stopParticipant(i);
    Assert.assertFalse(_participants[i].isConnected());
  }

  // Start new participants that have new TaskStateModel (NewMockTask) information
  _participants = new MockParticipantManager[_numNodes];
  for (int i = 0; i < _numNodes; i++) {
    Map<String, TaskFactory> taskFactoryReg = new HashMap<>();
    taskFactoryReg.put(NewMockTask.TASK_COMMAND, NewMockTask::new);
    String instanceName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    _participants[i] = new MockParticipantManager(ZK_ADDR, CLUSTER_NAME, instanceName);

    // Register a Task state model factory.
    StateMachineEngine stateMachine = _participants[i].getStateMachineEngine();
    stateMachine.registerStateModelFactory("Task",
        new TaskStateModelFactory(_participants[i], taskFactoryReg));
    _participants[i].syncStart();
  }

  _manager.connect();
  _driver = new TaskDriver(_manager);
}
 
Example #14
Source File: JobAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@GET
@Path("{jobName}/configs")
public Response getJobConfig(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
  TaskDriver taskDriver = getTaskDriver(clusterId);

  // A missing job config is reported as a bad request.
  JobConfig config = taskDriver.getJobConfig(jobName);
  if (config == null) {
    return badRequest("Job config for " + jobName + " does not exists");
  }
  return JSONRepresentation(config.getRecord());
}
 
Example #15
Source File: TestTaskStopQueue.java    From helix with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public void beforeClass() throws Exception {
  super.beforeClass();
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);
  _manager.connect();
  _driver = new TaskDriver(_manager);
  _admin = _gSetupTool.getClusterManagementTool();
}
 
Example #16
Source File: TestStoppingQueueFailToStop.java    From helix with Apache License 2.0 5 votes vote down vote up
@BeforeClass
public void beforeClass() throws Exception {
  _numPartitions = 1;
  _numNodes = 3;
  super.beforeClass();
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);

  // Stop participants that have been started in super class
  for (int i = 0; i < _numNodes; i++) {
    super.stopParticipant(i);
    Assert.assertFalse(_participants[i].isConnected());
  }

  // Start new participants that have new TaskStateModel (NewMockTask) information
  _participants = new MockParticipantManager[_numNodes];
  for (int i = 0; i < _numNodes; i++) {
    Map<String, TaskFactory> taskFactoryReg = new HashMap<>();
    taskFactoryReg.put(NewMockTask.TASK_COMMAND, NewMockTask::new);
    String instanceName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    _participants[i] = new MockParticipantManager(ZK_ADDR, CLUSTER_NAME, instanceName);

    // Register a Task state model factory.
    StateMachineEngine stateMachine = _participants[i].getStateMachineEngine();
    stateMachine.registerStateModelFactory("Task",
        new TaskStateModelFactory(_participants[i], taskFactoryReg));
    _participants[i].syncStart();
  }

  _manager.connect();
  _driver = new TaskDriver(_manager);
}
 
Example #17
Source File: TaskTestUtil.java    From helix with Apache License 2.0 5 votes vote down vote up
public static void pollForEmptyJobState(final TaskDriver driver, final String workflowName,
    final String jobName) throws Exception {
  final String namespacedJobName = String.format("%s_%s", workflowName, jobName);
  boolean succeed = TestHelper.verify(new TestHelper.Verifier() {

    @Override public boolean verify() throws Exception {
      WorkflowContext ctx = driver.getWorkflowContext(workflowName);
      return ctx == null || ctx.getJobState(namespacedJobName) == null
          || ctx.getJobState(namespacedJobName) == TaskState.NOT_STARTED;
    }
  }, _default_timeout);
  Assert.assertTrue(succeed);
}
 
Example #18
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@GET
@Path("{workflowId}/context")
public Response getWorkflowContext(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId) {
  TaskDriver driver = getTaskDriver(clusterId);
  ObjectNode contextNode = JsonNodeFactory.instance.objectNode();

  // A missing context simply yields an empty JSON object.
  WorkflowContext context = driver.getWorkflowContext(workflowId);
  if (context != null) {
    getWorkflowContextNode(contextNode, context.getRecord());
  }
  return JSONRepresentation(contextNode);
}
 
Example #19
Source File: TestForceDeleteWorkflow.java    From helix with Apache License 2.0 5 votes vote down vote up
// Rebuilds the cluster's participants so that they register DelayedStopTask as the
// "Task" state model, then connects an administrator manager and creates the TaskDriver
// and cluster-management tool used by the tests in this class.
@BeforeClass
public void beforeClass() throws Exception {
  super.beforeClass();

  // Stop participants that have been started in super class
  for (int i = 0; i < _numNodes; i++) {
    super.stopParticipant(i);
  }

  // Check that participants are actually stopped
  for (int i = 0; i < _numNodes; i++) {
    Assert.assertFalse(_participants[i].isConnected());
  }

  // Start new participants that have new TaskStateModel (DelayedStopTask) information
  _participants = new MockParticipantManager[_numNodes];
  for (int i = 0; i < _numNodes; i++) {
    Map<String, TaskFactory> taskFactoryReg = new HashMap<>();
    taskFactoryReg.put(DelayedStopTask.TASK_COMMAND, DelayedStopTask::new);
    String instanceName = PARTICIPANT_PREFIX + "_" + (_startPort + i);
    _participants[i] = new MockParticipantManager(ZK_ADDR, CLUSTER_NAME, instanceName);

    // Register a Task state model factory.
    StateMachineEngine stateMachine = _participants[i].getStateMachineEngine();
    stateMachine.registerStateModelFactory("Task",
        new TaskStateModelFactory(_participants[i], taskFactoryReg));
    _participants[i].syncStart();
  }

  // Connect only after the participants are up; TaskDriver needs a connected manager.
  _manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Admin",
      InstanceType.ADMINISTRATOR, ZK_ADDR);
  _manager.connect();

  _driver = new TaskDriver(_manager);

  _admin = _gSetupTool.getClusterManagementTool();
}
 
Example #20
Source File: TestStopWorkflow.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Tests that stopping a workflow does result in its task ending up in STOPPED state.
 * @throws InterruptedException
 */
@Test(dependsOnMethods = "testStopWorkflow")
public void testStopTask() throws Exception {
  stopTestSetup(1);

  String workflowName = TestHelper.getTestMethodName();
  WorkflowConfig.Builder workflowConfigBuilder = new WorkflowConfig.Builder(workflowName);
  workflowConfigBuilder.setAllowOverlapJobAssignment(true);
  Workflow.Builder workflowBuilder = new Workflow.Builder(workflowName);
  workflowBuilder.setWorkflowConfig(workflowConfigBuilder.build());

  // Add a single job holding one "StopTask" task.
  for (int jobIndex = 0; jobIndex < 1; jobIndex++) {
    List<TaskConfig> taskConfigs = new ArrayList<>();
    taskConfigs.add(new TaskConfig("StopTask", new HashMap<>()));
    JobConfig.Builder jobBuilder = new JobConfig.Builder().setCommand("Dummy")
        .addTaskConfigs(taskConfigs).setJobCommandConfigMap(new HashMap<>());
    workflowBuilder.addJob("JOB" + jobIndex, jobBuilder);
  }

  _driver.start(workflowBuilder.build());
  _driver.pollForWorkflowState(workflowName, TaskState.IN_PROGRESS);

  // Stop the workflow, then verify both the poll result and the persisted context agree.
  _driver.stop(workflowName);
  _driver.pollForWorkflowState(workflowName, TaskState.STOPPED);
  Assert.assertEquals(TaskDriver.getWorkflowContext(_manager, workflowName).getWorkflowState(),
      TaskState.STOPPED);

  cleanupParticipants(1);
}
 
Example #21
Source File: TestQuotaConstraintSkipWorkflowAssignment.java    From helix with Apache License 2.0 5 votes vote down vote up
// Verifies that when the DEFAULT task quota is small (ratio 3 vs 37 for "OtherType"),
// the task-scheduling pipeline assigns only as many workflows as the quota allows and
// reports that the global DEFAULT-type capacity is exhausted.
@Test
public void testQuotaConstraintSkipWorkflowAssignment() throws Exception {
  ClusterEvent event = new ClusterEvent(ClusterEventType.Unknown);
  WorkflowControllerDataProvider cache = new WorkflowControllerDataProvider(CLUSTER_NAME);
  JobConfig.Builder job = new JobConfig.Builder();

  // Each task is configured to run for a long time (100s delay) so that assignments
  // stay occupied while the pipeline stages run.
  job.setJobCommandConfigMap(Collections.singletonMap(MockTask.JOB_DELAY, "100000"));
  TaskDriver driver = new TaskDriver(_manager);
  // Launch ten one-job workflows, each carrying a single mock task.
  for (int i = 0; i < 10; i++) {
    Workflow.Builder workflow = new Workflow.Builder("Workflow" + i);
    job.setWorkflow("Workflow" + i);
    TaskConfig taskConfig =
        new TaskConfig(MockTask.TASK_COMMAND, new HashMap<String, String>(), null, null);
    job.addTaskConfigMap(Collections.singletonMap(taskConfig.getId(), taskConfig));
    job.setJobId(TaskUtil.getNamespacedJobName("Workflow" + i, "JOB"));
    workflow.addJob("JOB", job);
    driver.start(workflow.build());
  }
  // Constrain the DEFAULT quota type to a 3:37 ratio against "OtherType".
  ConfigAccessor accessor = new ConfigAccessor(_gZkClient);
  ClusterConfig clusterConfig = accessor.getClusterConfig(CLUSTER_NAME);
  clusterConfig.setTaskQuotaRatio(AssignableInstance.DEFAULT_QUOTA_TYPE, 3);
  clusterConfig.setTaskQuotaRatio("OtherType", 37);
  accessor.setClusterConfig(CLUSTER_NAME, clusterConfig);
  cache.refresh(_manager.getHelixDataAccessor());
  event.addAttribute(AttributeName.ControllerDataProvider.name(), cache);
  event.addAttribute(AttributeName.helixmanager.name(), _manager);
  // Run the controller pipeline stages that compute task assignments.
  runStage(event, new ResourceComputationStage());
  runStage(event, new CurrentStateComputationStage());
  runStage(event, new TaskSchedulingStage());
  // DEFAULT-type capacity must be exhausted after scheduling.
  Assert.assertTrue(!cache.getAssignableInstanceManager()
      .hasGlobalCapacity(AssignableInstance.DEFAULT_QUOTA_TYPE));
  BestPossibleStateOutput bestPossibleStateOutput =
      event.getAttribute(AttributeName.BEST_POSSIBLE_STATE.name());
  // Only 3 of the 10 workflows should have received an assignment.
  Assert.assertTrue(bestPossibleStateOutput.getStateMap().size() == 3);
}
 
Example #22
Source File: TestBatchAddJobs.java    From helix with Apache License 2.0 5 votes vote down vote up
// Connects a dedicated administrator HelixManager to the cluster and prepares the
// TaskDriver plus the "JOB_<index>#" prefix used to name this task's submitted jobs.
public SubmitJobTask(String zkAddress, int index) throws Exception {
  HelixManager manager = HelixManagerFactory.getZKHelixManager(CLUSTER_NAME, "Administrator",
      InstanceType.ADMINISTRATOR, zkAddress);
  manager.connect();
  _driver = new TaskDriver(manager);
  _jobPrefixName = "JOB_" + index + "#";
}
 
Example #23
Source File: TaskTestUtil.java    From helix with Apache License 2.0 5 votes vote down vote up
/**
 * Polls every 100ms (up to the default timeout) until the workflow context exists,
 * asserting non-null before returning it.
 *
 * @param driver task driver used to fetch the context
 * @param workflowResource name of the workflow to poll
 * @return the non-null workflow context
 * @throws InterruptedException if interrupted while sleeping between polls
 */
public static WorkflowContext pollForWorkflowContext(TaskDriver driver, String workflowResource)
    throws InterruptedException {
  // Wait for completion.
  long deadline = System.currentTimeMillis() + _default_timeout;
  WorkflowContext ctx = driver.getWorkflowContext(workflowResource);
  // Fix: the original do/while slept 100ms even after the context had already been
  // fetched successfully; check before sleeping instead.
  while (ctx == null && System.currentTimeMillis() < deadline) {
    Thread.sleep(100);
    ctx = driver.getWorkflowContext(workflowResource);
  }
  Assert.assertNotNull(ctx);
  return ctx;
}
 
Example #24
Source File: JobAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@GET
@Path("{jobName}/context")
public Response getJobContext(@PathParam("clusterId") String clusterId,
    @PathParam("workflowName") String workflowName, @PathParam("jobName") String jobName) {
  TaskDriver taskDriver = getTaskDriver(clusterId);

  // A missing runtime context is reported as a bad request.
  JobContext context = taskDriver.getJobContext(jobName);
  if (context == null) {
    return badRequest("Job context for " + jobName + " does not exists");
  }
  return JSONRepresentation(context.getRecord());
}
 
Example #25
Source File: GobblinHelixTaskTest.java    From incubator-gobblin with Apache License 2.0 5 votes vote down vote up
/**
 * To test against org.apache.gobblin.cluster.GobblinHelixTask#getPartitionForHelixTask(org.apache.helix.task.TaskDriver)
 * we need to assign the right partition id for each helix task, which would be queried from taskDriver.
 * This method encapsulate all mocking steps for taskDriver object to return expected value.
 *
 * The returned mock maps the given task config's id to partition 0 via the JobContext's
 * taskId-to-partition map.
 */
private TaskDriver createTaskDriverWithMockedAttributes(TaskCallbackContext taskCallbackContext,
    TaskConfig taskConfig) {
  // NOTE(review): this joins TEST_JOB_ID with itself; presumably a workflow id + job id
  // pair was intended for the namespaced "workflow_job" form — confirm against the
  // naming scheme used by getPartitionForHelixTask.
  String helixJobId = Joiner.on("_").join(TestHelper.TEST_JOB_ID, TestHelper.TEST_JOB_ID);
  JobConfig jobConfig = Mockito.mock(JobConfig.class);
  when(jobConfig.getJobId()).thenReturn(helixJobId);
  when(taskCallbackContext.getJobConfig()).thenReturn(jobConfig);
  JobContext mockJobContext = Mockito.mock(JobContext.class);
  // Partition 0 is the value getPartitionForHelixTask is expected to look up.
  Map<String, Integer> taskIdPartitionMap = ImmutableMap.of(taskConfig.getId(), 0);
  when(mockJobContext.getTaskIdPartitionMap()).thenReturn(taskIdPartitionMap);
  TaskDriver taskDriver = Mockito.mock(TaskDriver.class);
  when(taskDriver.getJobContext(Mockito.anyString())).thenReturn(mockJobContext);
  return taskDriver;
}
 
Example #26
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@GET
public Response getWorkflows(@PathParam("clusterId") String clusterId) {
  TaskDriver driver = getTaskDriver(clusterId);
  // Only the workflow names are returned, not their full configs.
  Map<String, List<String>> payload = new HashMap<>();
  payload.put(WorkflowProperties.Workflows.name(),
      new ArrayList<>(driver.getWorkflows().keySet()));
  return JSONRepresentation(payload);
}
 
Example #27
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@GET
@Path("{workflowId}")
public Response getWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId) {
  TaskDriver taskDriver = getTaskDriver(clusterId);
  WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflowId);
  // Fix: the original null-checked workflowConfig only when building the config node,
  // then dereferenced it unconditionally for getJobDag(), throwing an NPE (HTTP 500)
  // for an unknown workflow. Return a 400 instead, consistent with getJobs().
  if (workflowConfig == null) {
    return badRequest(String.format("Workflow %s is not found!", workflowId));
  }
  WorkflowContext workflowContext = taskDriver.getWorkflowContext(workflowId);

  ObjectNode root = JsonNodeFactory.instance.objectNode();
  TextNode id = JsonNodeFactory.instance.textNode(workflowId);
  root.put(Properties.id.name(), id);

  ObjectNode workflowConfigNode = JsonNodeFactory.instance.objectNode();
  ObjectNode workflowContextNode = JsonNodeFactory.instance.objectNode();

  getWorkflowConfigNode(workflowConfigNode, workflowConfig.getRecord());

  // The context may legitimately be missing (workflow never ran); emit an empty node.
  if (workflowContext != null) {
    getWorkflowContextNode(workflowContextNode, workflowContext.getRecord());
  }

  root.put(WorkflowProperties.WorkflowConfig.name(), workflowConfigNode);
  root.put(WorkflowProperties.WorkflowContext.name(), workflowContextNode);

  // Attach the job DAG (all jobs plus child-to-parent edges) and the last scheduled task.
  JobDag jobDag = workflowConfig.getJobDag();
  ArrayNode jobs = OBJECT_MAPPER.valueToTree(jobDag.getAllNodes());
  ObjectNode parentJobs = OBJECT_MAPPER.valueToTree(jobDag.getChildrenToParents());
  root.put(WorkflowProperties.Jobs.name(), jobs);
  root.put(WorkflowProperties.ParentJobs.name(), parentJobs);
  root.put(WorkflowProperties.LastScheduledTask.name(), OBJECT_MAPPER.valueToTree(taskDriver.getLastScheduledTaskExecutionInfo(workflowId)));
  return JSONRepresentation(root);
}
 
Example #28
Source File: WorkflowAccessor.java    From helix with Apache License 2.0 5 votes vote down vote up
@DELETE
@Path("{workflowId}")
public Response deleteWorkflow(@PathParam("clusterId") String clusterId,
    @PathParam("workflowId") String workflowId,
    @QueryParam("force") @DefaultValue("false") String forceDelete) {
  // Interpret the optional "force" query parameter; defaults to a non-forced delete.
  boolean forceRequested = Boolean.parseBoolean(forceDelete);
  TaskDriver taskDriver = getTaskDriver(clusterId);
  try {
    taskDriver.delete(workflowId, forceRequested);
  } catch (HelixException e) {
    // Surface the framework's failure reason as a 400 response.
    return badRequest(String
        .format("Failed to delete workflow %s for reason : %s", workflowId, e.getMessage()));
  }
  return OK();
}
 
Example #29
Source File: GobblinHelixJobLauncherTest.java    From incubator-gobblin with Apache License 2.0 5 votes vote down vote up
/**
 * Polls up to 5 times (one second apart) for the workflow's config to appear,
 * returning as soon as it does. Returns silently on timeout or interruption.
 */
private void waitForWorkFlowStartup(TaskDriver taskDriver, String workflow) {
  for (int i = 0; i < 5; i++) {
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(workflow);

    if (workflowConfig != null) {
      break;
    }

    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      // Fix: the original swallowed the interrupt; restore the flag and stop waiting.
      Thread.currentThread().interrupt();
      break;
    }
  }
}
 
Example #30
Source File: GobblinHelixJobLauncherTest.java    From incubator-gobblin with Apache License 2.0 5 votes vote down vote up
/**
 * Polls up to 60 times (one second apart) for the workflow's config to disappear,
 * returning as soon as it does. Returns silently on timeout or interruption.
 */
private void waitForWorkFlowCleanup(TaskDriver taskDriver, String queueName) {
  for (int i = 0; i < 60; i++) {
    WorkflowConfig workflowConfig = taskDriver.getWorkflowConfig(queueName);

    if (workflowConfig == null) {
      break;
    }

    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      // Fix: the original swallowed the interrupt; restore the flag and stop waiting.
      Thread.currentThread().interrupt();
      break;
    }
  }
}