org.apache.sqoop.model.MJob Java Examples

The following examples show how to use org.apache.sqoop.model.MJob. Each example is taken from an open source project; the source file, project, and license are noted above the code.
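
Before the per-project examples, here is a minimal client-side sketch of how an MJob is typically created, configured, and saved through SqoopClient, following the same pattern as Examples #18 and #19 below. The server URL, link ids, and the "fromJobConfig.tableName" input key are illustrative placeholders, and the imports assume the Sqoop 2 (1.99.x) client API; substitute values and packages that match your Sqoop server and connectors.

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.common.Direction;
import org.apache.sqoop.model.MDriverConfig;
import org.apache.sqoop.model.MJob;

public class MJobQuickStart {
  public static void main(String[] args) {
    // Placeholder server URL and link ids -- adjust them for your environment.
    SqoopClient client = new SqoopClient("http://localhost:12000/sqoop/");
    long fromLinkId = 1L; // an existing "FROM" link (e.g. generic-jdbc-connector)
    long toLinkId = 2L;   // an existing "TO" link (e.g. hdfs-connector)

    // Create a job between the two links, as in Examples #18 and #19.
    MJob job = client.createJob(fromLinkId, toLinkId);
    job.setName("example-job");

    // Connector-side and driver-side configs are reached through the MJob model.
    // "fromJobConfig.tableName" is a generic-jdbc-connector input and is only illustrative.
    job.getJobConfig(Direction.FROM)
        .getStringInput("fromJobConfig.tableName").setValue("cities");
    MDriverConfig driverConfig = job.getDriverConfig();
    driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(3);

    // Persist the job; afterwards the job carries a persistence id assigned by the repository.
    client.saveJob(job);
    System.out.println("Created job with id " + job.getPersistenceId());
  }
}
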
Example #1
Source File: JdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void updateJob(final MJob job, RepositoryTransaction tx) {
  doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection conn) {
      if(!job.hasPersistenceId()) {
        throw new SqoopException(RepositoryError.JDBCREPO_0019);
      }
      if(!handler.existsJob(job.getPersistenceId(), conn)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0020,
          "Invalid id: " + job.getPersistenceId());
      }

      handler.updateJob(job, conn);
      return null;
    }
  }, (JdbcRepositoryTransaction) tx);
}
 
Example #2
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testEnableAndDisableJob() throws Exception {
  // disable job 1
  handler.enableJob(1, false, provider.getConnection());

  MJob retrieved = handler.findJob(1, provider.getConnection());
  assertNotNull(retrieved);
  assertEquals(false, retrieved.getEnabled());

  // enable job 1
  handler.enableJob(1, true, provider.getConnection());

  retrieved = handler.findJob(1, provider.getConnection());
  assertNotNull(retrieved);
  assertEquals(true, retrieved.getEnabled());
}
 
Example #3
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testCreateJob() throws Exception {
  Assert.assertEquals(provider.rowCount(new TableName("SQOOP", "SQ_JOB")), 2);
  Assert.assertEquals(provider.rowCount(new TableName("SQOOP", "SQ_JOB_INPUT")), 12);

  MJob retrieved = handler.findJob(1, provider.getConnection());
  assertEquals(1, retrieved.getPersistenceId());

  List<MConfig> configs;
  configs = retrieved.getJobConfig(Direction.FROM).getConfigs();
  assertEquals("Value1", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  configs = retrieved.getJobConfig(Direction.TO).getConfigs();
  assertEquals("Value2", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());

  configs = retrieved.getDriverConfig().getConfigs();
  assertEquals("Value1", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value2", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());
}
 
Example #4
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testExistsJob() throws Exception {
  assertTrue(handler.existsJob(1, provider.getConnection()));
  assertTrue(handler.existsJob(2, provider.getConnection()));
  assertFalse(handler.existsJob(3, provider.getConnection()));

  // Delete jobs
  for (MJob job : handler.findJobs(provider.getConnection())) {
    handler.deleteJob(job.getPersistenceId(), provider.getConnection());
  }

  // There shouldn't be anything on empty repository
  assertFalse(handler.existsJob(1, provider.getConnection()));
  assertFalse(handler.existsJob(2, provider.getConnection()));
  assertFalse(handler.existsJob(3, provider.getConnection()));
}
 
Example #5
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public MJob findJob(String name, Connection conn) {
  PreparedStatement stmt = null;
  try {
    stmt = conn.prepareStatement(crudQueries.getStmtSelectJobSingleByName());
    stmt.setString(1, name);

    List<MJob> jobs = loadJobs(stmt, conn);

    if (jobs.size() != 1) {
      return null;
    }

    // Return the first and only job object
    return jobs.get(0);

  } catch (SQLException ex) {
    logException(ex, name);
    throw new SqoopException(CommonRepositoryError.COMMON_0028, ex);
  } finally {
    closeStatements(stmt);
  }
}
 
Example #6
Source File: TestSubmissionHandling.java    From sqoop-on-spark with Apache License 2.0
private void loadSubmissions() throws Exception {
  MJob jobA = handler.findJob(JOB_A_NAME, provider.getConnection());
  MJob jobB = handler.findJob(JOB_B_NAME, provider.getConnection());

  MSubmission submissionA = getSubmission(jobA, SubmissionStatus.RUNNING);
  submissionA.getCounters().getCounterGroup("test-1").addCounter(new Counter("counter-1"));
  submissionA.getCounters().getCounterGroup("test-1").addCounter(new Counter("counter-2"));
  submissionA.getCounters().getCounterGroup("test-1").getCounter("counter-1").setValue(300);
  MSubmission submissionB = getSubmission(jobA, SubmissionStatus.SUCCEEDED);
  MSubmission submissionC = getSubmission(jobB, SubmissionStatus.FAILED);
  MSubmission submissionD = getSubmission(jobB, SubmissionStatus.UNKNOWN);
  handler.createSubmission(submissionA, provider.getConnection());
  handler.createSubmission(submissionB, provider.getConnection());
  handler.createSubmission(submissionC, provider.getConnection());
  handler.createSubmission(submissionD, provider.getConnection());
}
 
Example #7
Source File: ShowJobFunction.java    From sqoop-on-spark with Apache License 2.0
private void displayJob(MJob job) {
  DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);

  printlnResource(
    Constants.RES_SHOW_PROMPT_JOB_INFO,
    job.getPersistenceId(),
    job.getName(),
    job.getEnabled(),
    job.getCreationUser(),
    formatter.format(job.getCreationDate()),
    job.getLastUpdateUser(),
    formatter.format(job.getLastUpdateDate())
  );
  printlnResource(Constants.RES_SHOW_PROMPT_JOB_LID_CID_INFO,
      job.getLinkId(Direction.FROM),
      job.getConnectorId(Direction.FROM));

  displayConfig(job.getJobConfig(Direction.FROM).getConfigs(),
               client.getConnectorConfigBundle(job.getConnectorId(Direction.FROM)));
  displayConfig(job.getDriverConfig().getConfigs(),
               client.getDriverConfigBundle());
  displayConfig(job.getJobConfig(Direction.TO).getConfigs(),
               client.getConnectorConfigBundle(job.getConnectorId(Direction.TO)));
}
 
Example #8
Source File: ConfigFiller.java    From sqoop-on-spark with Apache License 2.0
/**
 * Fill job object based on user input.
 *
 * @param reader Associated console reader object
 * @param job Job that the user is supposed to fill in
 * @param fromConfigBundle Connector FROM config resource bundle
 * @param toConfigBundle Connector TO config resource bundle
 * @param driverConfigBundle Driver config resource bundle
 * @return True if all inputs were filled, false if the user stopped processing
 * @throws IOException
 */
public static boolean fillJobWithBundle(ConsoleReader reader,
                              MJob job,
                              ResourceBundle fromConfigBundle,
                              ResourceBundle toConfigBundle,
                              ResourceBundle driverConfigBundle)
                              throws IOException {

  job.setName(getName(reader, job.getName()));

  return fillJobConfigWithBundle(reader,
                   job.getJobConfig(Direction.FROM).getConfigs(),
                   fromConfigBundle,
                   job.getJobConfig(Direction.TO).getConfigs(),
                   toConfigBundle,
                   job.getDriverConfig().getConfigs(),
                   driverConfigBundle);
}
 
Example #9
Source File: ConfigDisplayer.java    From sqoop-on-spark with Apache License 2.0
/**
 * Prints the warning messages for configs whose validation status is WARNING
 * @param entity - link or job instance
 */
public static void displayConfigWarning(MAccountableEntity entity) {
  List<MConfig> configList = new ArrayList<MConfig>();
  boolean showMessage = true;
  if (entity instanceof MLink) {
    MLink link = (MLink) entity;
    configList.addAll(link.getConnectorLinkConfig().getConfigs());
  } else if(entity instanceof MJob) {
    MJob job = (MJob) entity;
    configList.addAll(job.getJobConfig(Direction.FROM).getConfigs());
    configList.addAll(job.getDriverConfig().getConfigs());
    configList.addAll(job.getJobConfig(Direction.TO).getConfigs());
  }
  for(MConfig config : configList) {
    if(config.getValidationStatus() == Status.WARNING) {
      if(showMessage) {
        print("\n@|yellow %s|@\n", resourceString(Constants.RES_CONFIG_DISPLAYER_FORM_WARNING));
        showMessage = false;
      }
      for(Message message : config.getValidationMessages()) {
        ConfigFiller.warningMessage(message.getMessage());
      }
    }
  }
}
 
Example #10
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testEnableAndDisableJob() throws Exception {
  loadJobsForLatestVersion();

  // disable job 1
  handler.enableJob(1, false, derbyConnection);

  MJob retrieved = handler.findJob(1, derbyConnection);
  assertNotNull(retrieved);
  assertEquals(false, retrieved.getEnabled());

  // enable job 1
  handler.enableJob(1, true, derbyConnection);

  retrieved = handler.findJob(1, derbyConnection);
  assertNotNull(retrieved);
  assertEquals(true, retrieved.getEnabled());
}
 
Example #11
Source File: OutputDirectoryTest.java    From sqoop-on-spark with Apache License 2.0
public void assertJobSubmissionFailure(MJob job, String ...fragments) throws Exception {
  // Try to execute the job and verify that it was not successful
  try {
    executeJob(job);
    fail("Expected failure in the job submission.");
  } catch (SqoopException ex) {
    // Top level exception should be CLIENT_0001
    assertEquals(ClientError.CLIENT_0001, ex.getErrorCode());

    // We can directly verify the ErrorCode from the SqoopException because the
    // client side does not rebuild SqoopExceptions for missing ErrorCodes; e.g.
    // the cause will be a generic Throwable and not a SqoopException instance.
    Throwable cause = ex.getCause();
    assertNotNull(cause);

    for(String fragment : fragments) {
      assertTrue(cause.getMessage().contains(fragment), "Expected fragment " + fragment + " in error message " + cause.getMessage());
    }
  }
}
 
Example #12
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testFindJobsByConnector() throws Exception {
  List<MJob> list;
  // Load empty list on empty repository
  list = handler.findJobs(derbyConnection);
  assertEquals(0, list.size());
  loadJobsForLatestVersion();

  // Load all 4 jobs on loaded repository
  list = handler.findJobsForConnector(1, derbyConnection);
  assertEquals(4, list.size());

  assertEquals("JA0", list.get(0).getName());
  assertEquals("JB0", list.get(1).getName());
  assertEquals("JC0", list.get(2).getName());
  assertEquals("JD0", list.get(3).getName());
}
 
Example #13
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testFindJobs() throws Exception {
  List<MJob> list;
  // Load empty list on empty repository
  list = handler.findJobs(derbyConnection);
  assertEquals(0, list.size());
  loadJobsForLatestVersion();

  // Load all four jobs on the loaded repository
  list = handler.findJobs(derbyConnection);
  assertEquals(4, list.size());

  assertEquals("JA0", list.get(0).getName());
  assertEquals("JB0", list.get(1).getName());
  assertEquals("JC0", list.get(2).getName());
  assertEquals("JD0", list.get(3).getName());
}
 
Example #14
Source File: RangerSqoopAuthorizerTest.java    From ranger with Apache License 2.0
/**
 * Helper function: init Sqoop to enable Ranger authentication
 */
private static void initSqoopAuth() throws IOException, ClassNotFoundException, IllegalAccessException,
		InstantiationException {
	// init sqoop configuration
	String basedir = System.getProperty("basedir");
	if (basedir == null) {
		basedir = new File(".").getCanonicalPath();
	}
	String sqoopConfigDirPath = basedir + "/src/test/resources/";
	System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR, sqoopConfigDirPath);
	SqoopConfiguration.getInstance().initialize();

	// init sqoop authorization
	AuthorizationManager.getInstance().initialize();

	// mock sqoop class for authentication
	RepositoryManager repositoryManager = mock(RepositoryManager.class);
	RepositoryManager.setInstance(repositoryManager);
	Repository repository = mock(Repository.class);
	when(repositoryManager.getRepository()).thenReturn(repository);

	MLink link = mock(MLink.class);
	when(repository.findLink(anyString())).thenReturn(link);
	MJob job = mock(MJob.class);
	when(repository.findJob(anyString())).thenReturn(job);

	// mock user "zhangqiang" as the creator of any link and any job
	when(link.getCreationUser()).thenReturn(ZHANGQIANG);
	when(job.getCreationUser()).thenReturn(ZHANGQIANG);
}
 
Example #15
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testFindJob() throws Exception {
  // Let's try to find a non-existing job
  try {
    handler.findJob(1, derbyConnection);
    fail();
  } catch(SqoopException ex) {
    assertEquals(CommonRepositoryError.COMMON_0027, ex.getErrorCode());
  }

  loadJobsForLatestVersion();

  MJob firstJob = handler.findJob(1, derbyConnection);
  assertNotNull(firstJob);
  assertEquals(1, firstJob.getPersistenceId());
  assertEquals("JA0", firstJob.getName());

  List<MConfig> configs;

  configs = firstJob.getJobConfig(Direction.FROM).getConfigs();
  assertEquals(2, configs.size());
  assertEquals("Value5", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value5", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());

  configs = firstJob.getJobConfig(Direction.TO).getConfigs();
  assertEquals(2, configs.size());
  assertEquals("Value9", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value9", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());

  configs = firstJob.getDriverConfig().getConfigs();
  assertEquals(2, configs.size());
  assertEquals("Value13", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value15", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());
}
 
Example #16
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the exception handling procedure when the database handler fails to
 * update the connector entity
 */
@Test
public void testConnectorConfigUpgradeHandlerWithUpdateConnectorError() {
  MConnector newConnector = connector(1, "1.1");
  MConnector oldConnector = connector(1);

  SqoopConnector sqconnector = mock(SqoopConnector.class);
  when(sqconnector.getConfigurableUpgrader()).thenReturn(connectorUpgraderMock);
  when(connectorMgrMock.getSqoopConnector(anyString())).thenReturn(sqconnector);

  List<MLink> linkList = links(link(1,1), link(2,1));
  List<MJob> jobList = jobs(job(1,1,1,1,1), job(2,1,1,2,1));
  doReturn(linkList).when(repoHandlerMock).findLinksForConnector(anyLong(), any(Connection.class));
  doReturn(jobList).when(repoHandlerMock).findJobsForConnector(anyLong(), any(Connection.class));
  doNothing().when(repoHandlerMock).deleteJobInputs(anyLong(), any(Connection.class));
  doNothing().when(repoHandlerMock).deleteLinkInputs(anyLong(), any(Connection.class));

  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "update connector error.");
  doThrow(exception).when(repoHandlerMock).upgradeConnectorAndConfigs(any(MConnector.class), any(Connection.class));

  try {
    repoSpy.upgradeConnector(oldConnector, newConnector);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findLinksForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).findJobsForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(2)).deleteJobInputs(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(2)).deleteLinkInputs(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).upgradeConnectorAndConfigs(any(MConnector.class), any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return ;
  }

  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example #17
Source File: JdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@SuppressWarnings("unchecked")
@Override
public List<MJob> findJobs() {
  return (List<MJob>) doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection conn) {
      return handler.findJobs(conn);
    }
  });
}
 
Example #18
Source File: FromRDBMSToKafkaTest.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testBasic() throws Exception {
  createAndLoadTableCities();

  // Kafka link
  MLink kafkaLink = getClient().createLink("kafka-connector");
  fillKafkaLinkConfig(kafkaLink);
  saveLink(kafkaLink);

  // RDBMS link
  MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
  fillRdbmsLinkConfig(rdbmsLink);
  saveLink(rdbmsLink);

  // Job creation
  MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), kafkaLink.getPersistenceId());

  // set rdbms "FROM" job config
  fillRdbmsFromConfig(job, "id");

  // set Kafka  "TO" job config
  fillKafkaToConfig(job);

  // driver config
  MDriverConfig driverConfig = job.getDriverConfig();
  driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(3);
  saveJob(job);

  executeJob(job);

  // this will assert the content of the array matches the content of the topic
  validateContent(input);
}
 
Example #19
Source File: FromHDFSToKafkaTest.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testBasic() throws Exception {
  createHdfsFromFile("input-0001",input);

  // Create Kafka link
  MLink kafkaLink = getClient().createLink("kafka-connector");
  fillKafkaLinkConfig(kafkaLink);
  saveLink(kafkaLink);

  // HDFS link
  MLink hdfsLink = getClient().createLink("hdfs-connector");
  fillHdfsLink(hdfsLink);
  saveLink(hdfsLink);

  // Job creation
  MJob job = getClient().createJob(hdfsLink.getPersistenceId(), kafkaLink.getPersistenceId());

  // Job connector configs
  fillHdfsFromConfig(job);
  fillKafkaToConfig(job);

  // driver config
  MDriverConfig driverConfig = job.getDriverConfig();
  driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(3);
  saveJob(job);

  executeJob(job);

  // this will assert the content of the array matches the content of the topic
  validateContent(input);
}
 
Example #20
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public List<MJob> findJobs(Connection conn) {
  PreparedStatement stmt = null;
  try {
    stmt = conn
        .prepareStatement(crudQueries.getStmtSelectJobAll());
    return loadJobs(stmt, conn);
  } catch (SQLException ex) {
    logException(ex);
    throw new SqoopException(CommonRepositoryError.COMMON_0028, ex);
  } finally {
    closeStatements(stmt);
  }
}
 
Example #21
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the driverConfig upgrade procedure when all jobs
 * using the old driver config are still valid for the new driver config
 */
@Test
public void testDriverConfigUpgradeWithValidJobs() {
  MDriver newDriverConfig = driver();

  when(driverMock.getConfigurableUpgrader()).thenReturn(driverUpgraderMock);
  when(driverMock.getDriverJobConfigurationClass()).thenReturn(ValidConfiguration.class);
  List<MJob> jobList = jobs(job(1,1,1,1,1), job(2,1,1,2,1));

  doReturn(jobList).when(repoSpy).findJobs();
  doNothing().when(repoSpy).updateLink(any(MLink.class), any(RepositoryTransaction.class));
  doNothing().when(repoSpy).updateJob(any(MJob.class), any(RepositoryTransaction.class));
  doNothing().when(repoSpy).upgradeDriverAndConfigs(any(MDriver.class), any(RepositoryTransaction.class));

  repoSpy.upgradeDriver(newDriverConfig);

  InOrder repoOrder = inOrder(repoSpy);
  InOrder txOrder = inOrder(repoTransactionMock);
  InOrder upgraderOrder = inOrder(driverUpgraderMock);

  repoOrder.verify(repoSpy, times(1)).findJobs();
  repoOrder.verify(repoSpy, times(1)).getTransaction();
  repoOrder.verify(repoSpy, times(1)).deleteJobInputs(1, repoTransactionMock);
  repoOrder.verify(repoSpy, times(1)).deleteJobInputs(2, repoTransactionMock);
  repoOrder.verify(repoSpy, times(1)).upgradeDriverAndConfigs(any(MDriver.class), any(RepositoryTransaction.class));
  repoOrder.verify(repoSpy, times(2)).updateJob(any(MJob.class), any(RepositoryTransaction.class));
  repoOrder.verifyNoMoreInteractions();
  txOrder.verify(repoTransactionMock, times(1)).begin();
  txOrder.verify(repoTransactionMock, times(1)).commit();
  txOrder.verify(repoTransactionMock, times(1)).close();
  txOrder.verifyNoMoreInteractions();
  upgraderOrder.verify(driverUpgraderMock, times(2)).upgradeJobConfig(any(MDriverConfig.class), any(MDriverConfig.class));
  upgraderOrder.verifyNoMoreInteractions();
}
 
Example #22
Source File: JobResourceRequest.java    From sqoop-on-spark with Apache License 2.0
public ValidationResultBean update(String serverUrl, MJob job) {
  JobBean jobBean = new JobBean(job);
  // Extract all config inputs including sensitive inputs
  JSONObject jobJson = jobBean.extract(false);
  String response = super.put(serverUrl + RESOURCE + job.getPersistenceId(),
      jobJson.toJSONString());
  ValidationResultBean validationBean = new ValidationResultBean();
  validationBean.restore(JSONUtils.parse(response));
  return validationBean;
}
 
Example #23
Source File: TestJobManager.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testGetJob() {
  MJob testJob = job(123l, 456l);
  testJob.setEnabled(true);
  MJob mJobSpy = org.mockito.Mockito.spy(testJob);
  when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
  when(jdbcRepoMock.findJob(123l)).thenReturn(mJobSpy);
  assertEquals(jobManager.getJob(123l), mJobSpy);
  verify(repositoryManagerMock, times(1)).getRepository();
  verify(jdbcRepoMock, times(1)).findJob(123l);
}
 
Example #24
Source File: ShowJobFunction.java    From sqoop-on-spark with Apache License 2.0
private void showSummary() {
  List<MJob> jobs = client.getJobs();

  List<String> header = new LinkedList<String>();
  header.add(resourceString(Constants.RES_TABLE_HEADER_ID));
  header.add(resourceString(Constants.RES_TABLE_HEADER_NAME));
  header.add(resourceString(Constants.RES_TABLE_HEADER_FROM_CONNECTOR));
  header.add(resourceString(Constants.RES_TABLE_HEADER_TO_CONNECTOR));
  header.add(resourceString(Constants.RES_TABLE_HEADER_ENABLED));

  List<String> ids = new LinkedList<String>();
  List<String> names = new LinkedList<String>();
  List<String> fromConnectors = new LinkedList<String>();
  List<String> toConnectors = new LinkedList<String>();
  List<String> availabilities = new LinkedList<String>();

  for(MJob job : jobs) {
    ids.add(String.valueOf(job.getPersistenceId()));
    names.add(job.getName());
    fromConnectors.add(String.valueOf(
        job.getConnectorId(Direction.FROM)));
    toConnectors.add(String.valueOf(
        job.getConnectorId(Direction.TO)));
    availabilities.add(String.valueOf(job.getEnabled()));
  }

  TableDisplayer.display(header, ids, names, fromConnectors, toConnectors, availabilities);
}
 
Example #25
Source File: ShowJobFunction.java    From sqoop-on-spark with Apache License 2.0
private void showJobs(Long id) {
  List<MJob> jobs;
  if (id == null) {
    jobs = client.getJobs();
  } else {
    jobs = client.getJobsByConnector(id);
  }
  printlnResource(Constants.RES_SHOW_PROMPT_JOBS_TO_SHOW, jobs.size());

  for (MJob job : jobs) {
    displayJob(job);
  }
}
 
Example #26
Source File: BeanTestUtil.java    From sqoop-on-spark with Apache License 2.0
public static MJob createJob(String connectorName, String jobName, Long jobId, Date created, Date updated) {
  MJob job = BeanTestUtil.getJob(connectorName);
  job.setName(jobName);
  job.setPersistenceId(jobId);
  job.setCreationDate(created);
  job.setLastUpdateDate(updated);
  job.setEnabled(false);
  return job;
}
 
Example #27
Source File: Repository.java    From sqoop-on-spark with Apache License 2.0
private void deletelinksAndJobInputs(List<MLink> links, List<MJob> jobs, RepositoryTransaction tx) {
  for (MJob job : jobs) {
    deleteJobInputs(job.getPersistenceId(), tx);
  }
  for (MLink link : links) {
    deleteLinkInputs(link.getPersistenceId(), tx);
  }
}
 
Example #28
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the exception handling procedure when the database handler fails to
 * update the job entity
 */
@Test
public void testDriverConfigUpgradeHandlerWithUpdateJobError() {
  MDriver driverConfig = driver();

  when(driverMock.getConfigurableUpgrader()).thenReturn(driverUpgraderMock);
  when(driverMock.getDriverJobConfigurationClass()).thenReturn(ValidConfiguration.class);
  List<MJob> jobList = jobs(job(1,1,1,1,1), job(2,1,1,2,1));
  doReturn(jobList).when(repoHandlerMock).findJobs(any(Connection.class));
  doNothing().when(repoHandlerMock).deleteJobInputs(anyLong(), any(Connection.class));
  doNothing().when(repoHandlerMock).upgradeDriverAndConfigs(any(MDriver.class), any(Connection.class));
  doReturn(true).when(repoHandlerMock).existsJob(anyLong(), any(Connection.class));

  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "update job error.");
  doThrow(exception).when(repoHandlerMock).updateJob(any(MJob.class), any(Connection.class));

  try {
    repoSpy.upgradeDriver(driverConfig);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findJobs(any(Connection.class));
    verify(repoHandlerMock, times(2)).deleteJobInputs(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).upgradeDriverAndConfigs(any(MDriver.class), any(Connection.class));
    verify(repoHandlerMock, times(1)).existsJob(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).updateJob(any(MJob.class), any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return ;
  }

  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example #29
Source File: JobBean.java    From sqoop-on-spark with Apache License 2.0 5 votes vote down vote up
@SuppressWarnings("unchecked")
protected JSONArray extractJobs(boolean skipSensitive) {
  JSONArray jobArray = new JSONArray();
  for (MJob job : jobs) {
    jobArray.add(extractJob(skipSensitive, job));
  }
  return jobArray;
}
 
Example #30
Source File: JdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public MJob findJob(final long id) {
  return (MJob) doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection conn) {
      return handler.findJob(id, conn);
    }
  });
}