org.apache.sqoop.model.MLink Java Examples

The following examples show how to use org.apache.sqoop.model.MLink. Each example notes the project and source file it was taken from.
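Before the numbered examples, here is a minimal client-side sketch of how an MLink is typically created, configured, and saved. It is an orientation aid only, assuming the standard Sqoop 2 SqoopClient API; the server URL, link name, and connector/config identifiers are placeholders rather than values confirmed by the examples below.

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MConfigList;
import org.apache.sqoop.model.MLink;

public class MLinkQuickStart {
  public static void main(String[] args) {
    // Placeholder server URL -- point this at your own Sqoop 2 server.
    SqoopClient client = new SqoopClient("http://localhost:12000/sqoop/");

    // Ask the server for a new, not-yet-persisted link bound to a connector.
    MLink link = client.createLink("generic-jdbc-connector");
    link.setName("demo-jdbc-link");

    // Fill the connector-side link config (compare fillRdbmsLinkConfig() in Example #21).
    MConfigList configs = link.getConnectorLinkConfig();
    configs.getStringInput("linkConfig.jdbcDriver").setValue("org.apache.derby.jdbc.EmbeddedDriver");
    configs.getStringInput("linkConfig.connectionString").setValue("jdbc:derby:memory:demo;create=true");

    // Persist the link; once saved it carries a server-assigned persistence id.
    client.saveLink(link);
    System.out.println("Created link with id " + link.getPersistenceId());
  }
}

Server-side code, by contrast, usually obtains MLink instances through the Repository API (findLink, findLinks, updateLink), as most of the examples below illustrate.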
Example #1
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public MLink findLink(long linkId, Connection conn) {
  PreparedStatement linkFetchStmt = null;
  try {
    linkFetchStmt = conn.prepareStatement(crudQueries.getStmtSelectLinkSingle());
    linkFetchStmt.setLong(1, linkId);

    List<MLink> links = loadLinks(linkFetchStmt, conn);

    if (links.size() != 1) {
      throw new SqoopException(CommonRepositoryError.COMMON_0021, "Couldn't find"
          + " link with id " + linkId);
    }

    // Return the single link matching the given id
    return links.get(0);

  } catch (SQLException ex) {
    logException(ex, linkId);
    throw new SqoopException(CommonRepositoryError.COMMON_0020, ex);
  } finally {
    closeStatements(linkFetchStmt);
  }
}
 
Example #2
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testCreateLink() throws Exception {
  List<MConfig> configs;

  MLink retrieved = handler.findLink(1, provider.getConnection());
  assertEquals(1, retrieved.getPersistenceId());

  configs = retrieved.getConnectorLinkConfig().getConfigs();
  assertEquals("Value1", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value2", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());

  retrieved = handler.findLink(2, provider.getConnection());
  assertEquals(2, retrieved.getPersistenceId());

  configs = retrieved.getConnectorLinkConfig().getConfigs();
  assertEquals("Value1", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value2", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());

  Assert.assertEquals(provider.rowCount(new TableName("SQOOP", "SQ_LINK")), 2);
  Assert.assertEquals(provider.rowCount(new TableName("SQOOP", "SQ_LINK_INPUT")), 4);
}
 
Example #3
Source File: ShowLinkFunction.java    From sqoop-on-spark with Apache License 2.0
private void displayLink(MLink link) {
  DateFormat formatter = DateFormat.getDateTimeInstance(DateFormat.SHORT, DateFormat.SHORT);

  printlnResource(Constants.RES_SHOW_PROMPT_LINK_INFO,
    link.getPersistenceId(),
    link.getName(),
    link.getEnabled(),
    link.getCreationUser(),
    formatter.format(link.getCreationDate()),
    link.getLastUpdateUser(),
    formatter.format(link.getLastUpdateDate())
  );

  long connectorId = link.getConnectorId();
  MConnector connector = client.getConnector(connectorId);
  String connectorName = "";
  if (connector != null) {
    connectorName = connector.getUniqueName();
  }
  printlnResource(Constants.RES_SHOW_PROMPT_LINK_CID_INFO, connectorName, connectorId);

  // Display link config
  displayConfig(link.getConnectorLinkConfig().getConfigs(),
      client.getConnectorConfigBundle(connectorId));
}
 
Example #4
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testFindLinksByConnector() throws Exception {
  List<MLink> list;
  Long connectorId = handler.findConnector("A", provider.getConnection()).getPersistenceId();

  // Only the link associated with connector A should be returned
  list = handler.findLinksForConnector(connectorId, provider.getConnection());
  assertEquals(1, list.size());
  assertEquals(LINK_A_NAME, list.get(0).getName());

  // Delete links
  for (MLink link : handler.findLinks(provider.getConnection())) {
    handler.deleteLink(link.getPersistenceId(), provider.getConnection());
  }

  // After deleting all links, expect an empty list
  list = handler.findLinksForConnector(connectorId, provider.getConnection());
  assertEquals(0, list.size());
}
 
Example #5
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testEnableAndDisableLink() throws Exception {
  // disable link 1
  handler.enableLink(1, false, provider.getConnection());

  MLink retrieved = handler.findLink(1, provider.getConnection());
  assertNotNull(retrieved);
  assertEquals(false, retrieved.getEnabled());

  // enable link 1
  handler.enableLink(1, true, provider.getConnection());

  retrieved = handler.findLink(1, provider.getConnection());
  assertNotNull(retrieved);
  assertEquals(true, retrieved.getEnabled());
}
 
Example #6
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@BeforeMethod(alwaysRun = true)
public void setUp() throws Exception {
  super.setUp();

  handler.registerDriver(getDriver(), provider.getConnection());
  MConnector connectorA = getConnector(CONNECTOR_A_NAME, CONNECTOR_A_CLASSNAME, CONNECTOR_A_VERSION, true, true);
  MConnector connectorB = getConnector(CONNECTOR_B_NAME, CONNECTOR_B_CLASSNAME, CONNECTOR_B_VERSION, true, true);
  handler.registerConnector(connectorA, provider.getConnection());
  handler.registerConnector(connectorB, provider.getConnection());
  MLink linkA = getLink(LINK_A_NAME, connectorA);
  MLink linkB = getLink(LINK_B_NAME, connectorB);
  handler.createLink(linkA, provider.getConnection());
  handler.createLink(linkB, provider.getConnection());
  handler.createJob(getJob(JOB_A_NAME, connectorA, connectorB, linkA, linkB), provider.getConnection());
  handler.createJob(getJob(JOB_B_NAME, connectorB, connectorA, linkB, linkA), provider.getConnection());
}
 
Example #7
Source File: ConfigDisplayer.java    From sqoop-on-spark with Apache License 2.0
/**
 * Prints the validation warning messages for any config whose status is WARNING
 * @param entity - link or job instance
 */
public static void displayConfigWarning(MAccountableEntity entity) {
  List<MConfig> configList = new ArrayList<MConfig>();
  boolean showMessage = true;
  if (entity instanceof MLink) {
    MLink link = (MLink) entity;
    configList.addAll(link.getConnectorLinkConfig().getConfigs());
  } else if(entity instanceof MJob) {
    MJob job = (MJob) entity;
    configList.addAll(job.getJobConfig(Direction.FROM).getConfigs());
    configList.addAll(job.getDriverConfig().getConfigs());
    configList.addAll(job.getJobConfig(Direction.TO).getConfigs());
  }
  for(MConfig config : configList) {
    if(config.getValidationStatus() == Status.WARNING) {
      if(showMessage) {
        print("\n@|yellow %s|@\n", resourceString(Constants.RES_CONFIG_DISPLAYER_FORM_WARNING));
        showMessage = false;
      }
      for(Message message : config.getValidationMessages()) {
        ConfigFiller.warningMessage(message.getMessage());
      }
    }
  }
}
 
Example #8
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public MLink findLink(String linkName, Connection conn) {
  PreparedStatement linkFetchStmt = null;
  try {
    linkFetchStmt = conn.prepareStatement(crudQueries.getStmtSelectLinkSingleByName());
    linkFetchStmt.setString(1, linkName);

    List<MLink> links = loadLinks(linkFetchStmt, conn);

    if (links.size() != 1) {
      return null;
    }

    // Return the single link matching the given name
    return links.get(0);

  } catch (SQLException ex) {
    logException(ex, linkName);
    throw new SqoopException(CommonRepositoryError.COMMON_0020, ex);
  } finally {
    closeStatements(linkFetchStmt);
  }
}
 
Example #9
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testFindLinks() throws Exception {
  List<MLink> list;

  // Load empty list on empty repository
  list = handler.findLinks(getDerbyDatabaseConnection());
  assertEquals(0, list.size());

  loadLinksForLatestVersion();

  // Load both links from the populated repository
  list = handler.findLinks(getDerbyDatabaseConnection());
  assertEquals(2, list.size());

  assertEquals("CA", list.get(0).getName());
  assertEquals("CB", list.get(1).getName());
}
 
Example #10
Source File: JdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void updateLink(final MLink link, RepositoryTransaction tx) {
  doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection conn) {
      if (!link.hasPersistenceId()) {
        throw new SqoopException(RepositoryError.JDBCREPO_0016);
      }
      if (!handler.existsLink(link.getPersistenceId(), conn)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0017, "Invalid id: "
            + link.getPersistenceId());
      }

      handler.updateLink(link, conn);
      return null;
    }
  }, (JdbcRepositoryTransaction) tx);
}
 
Example #11
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testFindLinksByNonExistingConnector() throws Exception {
  List<MLink> list;

  // Load empty list on empty repository
  list = handler.findLinks(getDerbyDatabaseConnection());
  assertEquals(0, list.size());

  loadLinksForLatestVersion();

  list = handler.findLinksForConnector(2, getDerbyDatabaseConnection());
  assertEquals(0, list.size());
}
 
Example #12
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the driverConfig upgrade procedure when all existing jobs
 * remain valid for the new driver config
 */
@Test
public void testDriverConfigUpgradeWithValidJobs() {
  MDriver newDriverConfig = driver();

  when(driverMock.getConfigurableUpgrader()).thenReturn(driverUpgraderMock);
  when(driverMock.getDriverJobConfigurationClass()).thenReturn(ValidConfiguration.class);
  List<MJob> jobList = jobs(job(1,1,1,1,1), job(2,1,1,2,1));

  doReturn(jobList).when(repoSpy).findJobs();
  doNothing().when(repoSpy).updateLink(any(MLink.class), any(RepositoryTransaction.class));
  doNothing().when(repoSpy).updateJob(any(MJob.class), any(RepositoryTransaction.class));
  doNothing().when(repoSpy).upgradeDriverAndConfigs(any(MDriver.class), any(RepositoryTransaction.class));

  repoSpy.upgradeDriver(newDriverConfig);

  InOrder repoOrder = inOrder(repoSpy);
  InOrder txOrder = inOrder(repoTransactionMock);
  InOrder upgraderOrder = inOrder(driverUpgraderMock);

  repoOrder.verify(repoSpy, times(1)).findJobs();
  repoOrder.verify(repoSpy, times(1)).getTransaction();
  repoOrder.verify(repoSpy, times(1)).deleteJobInputs(1, repoTransactionMock);
  repoOrder.verify(repoSpy, times(1)).deleteJobInputs(2, repoTransactionMock);
  repoOrder.verify(repoSpy, times(1)).upgradeDriverAndConfigs(any(MDriver.class), any(RepositoryTransaction.class));
  repoOrder.verify(repoSpy, times(2)).updateJob(any(MJob.class), any(RepositoryTransaction.class));
  repoOrder.verifyNoMoreInteractions();
  txOrder.verify(repoTransactionMock, times(1)).begin();
  txOrder.verify(repoTransactionMock, times(1)).commit();
  txOrder.verify(repoTransactionMock, times(1)).close();
  txOrder.verifyNoMoreInteractions();
  upgraderOrder.verify(driverUpgraderMock, times(2)).upgradeJobConfig(any(MDriverConfig.class), any(MDriverConfig.class));
  upgraderOrder.verifyNoMoreInteractions();
}
 
Example #13
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test(expectedExceptions = SqoopException.class)
public void testCreateDuplicateLink() throws Exception {
  MLink link = getLink();
  fillLink(link);
  link.setName("test");
  handler.createLink(link, getDerbyDatabaseConnection());
  assertEquals(1, link.getPersistenceId());

  link.setPersistenceId(MLink.PERSISTANCE_ID_DEFAULT);
  handler.createLink(link, getDerbyDatabaseConnection());
}
 
Example #14
Source File: RangerSqoopAuthorizerTest.java    From ranger with Apache License 2.0
/**
 * Helper function: initialize Sqoop with Ranger authorization enabled
 */
private static void initSqoopAuth() throws IOException, ClassNotFoundException, IllegalAccessException,
		InstantiationException {
	// init sqoop configuration
	String basedir = System.getProperty("basedir");
	if (basedir == null) {
		basedir = new File(".").getCanonicalPath();
	}
	String sqoopConfigDirPath = basedir + "/src/test/resources/";
	System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR, sqoopConfigDirPath);
	SqoopConfiguration.getInstance().initialize();

	// init sqoop authorization
	AuthorizationManager.getInstance().initialize();

	// mock sqoop repository classes for the authorization checks
	RepositoryManager repositoryManager = mock(RepositoryManager.class);
	RepositoryManager.setInstance(repositoryManager);
	Repository repository = mock(Repository.class);
	when(repositoryManager.getRepository()).thenReturn(repository);

	MLink link = mock(MLink.class);
	when(repository.findLink(anyString())).thenReturn(link);
	MJob job = mock(MJob.class);
	when(repository.findJob(anyString())).thenReturn(job);

	// mock user "zhangqiang" as the creator of any link and any job
	when(link.getCreationUser()).thenReturn(ZHANGQIANG);
	when(job.getCreationUser()).thenReturn(ZHANGQIANG);
}
 
Example #15
Source File: FromRDBMSToKafkaTest.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testBasic() throws Exception {
  createAndLoadTableCities();

  // Kafka link
  MLink kafkaLink = getClient().createLink("kafka-connector");
  fillKafkaLinkConfig(kafkaLink);
  saveLink(kafkaLink);

  // RDBMS link
  MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
  fillRdbmsLinkConfig(rdbmsLink);
  saveLink(rdbmsLink);

  // Job creation
  MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), kafkaLink.getPersistenceId());

  // set rdbms "FROM" job config
  fillRdbmsFromConfig(job, "id");

  // set Kafka "TO" job config
  fillKafkaToConfig(job);

  // driver config
  MDriverConfig driverConfig = job.getDriverConfig();
  driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(3);
  saveJob(job);

  executeJob(job);

  // this will assert the content of the array matches the content of the topic
  validateContent(input);
}
 
Example #16
Source File: OutputDirectoryTest.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testOutputDirectoryIsNotEmpty() throws Exception {
  createAndLoadTableCities();

  hdfsClient.mkdirs(new Path(getHadoopTestDirectory()));
  hdfsClient.createNewFile(new Path(getHadoopTestDirectory() + "/x"));

  // RDBMS link
  MLink rdbmsConnection = getClient().createLink("generic-jdbc-connector");
  fillRdbmsLinkConfig(rdbmsConnection);
  saveLink(rdbmsConnection);

  // HDFS link
  MLink hdfsConnection = getClient().createLink("hdfs-connector");
  fillHdfsLink(hdfsConnection);
  saveLink(hdfsConnection);

  // Job creation
  MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());

  // Set rdbms "FROM" config
  fillRdbmsFromConfig(job, "id");

  // fill the hdfs "TO" config
  fillHdfsToConfig(job, ToFormat.TEXT_FILE);

  saveJob(job);

  assertJobSubmissionFailure(job,
    HdfsConnectorError.GENERIC_HDFS_CONNECTOR_0007.toString(),
    "is not empty"
  );

  dropTable();
}
 
Example #17
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the exception handling procedure when the database handler fails to
 * delete job inputs for a given connector
 */
@Test
public void testConnectorConfigUpgradeHandlerWithDeleteJobInputsError() {
  MConnector newConnector = connector(1, "1.1");
  MConnector oldConnector = connector(1);

  SqoopConnector sqconnector = mock(SqoopConnector.class);
  when(sqconnector.getConfigurableUpgrader()).thenReturn(connectorUpgraderMock);
  when(connectorMgrMock.getSqoopConnector(anyString())).thenReturn(sqconnector);

  List<MLink> linkList = links(link(1,1), link(2,1));
  List<MJob> jobList = jobs(job(1,1,1,1,1), job(2,1,1,2,1));
  doReturn(linkList).when(repoHandlerMock).findLinksForConnector(anyLong(), any(Connection.class));
  doReturn(jobList).when(repoHandlerMock).findJobsForConnector(anyLong(), any(Connection.class));

  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "delete job inputs for connector error.");
  doThrow(exception).when(repoHandlerMock).deleteJobInputs(anyLong(), any(Connection.class));

  try {
    repoSpy.upgradeConnector(oldConnector, newConnector);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findLinksForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).findJobsForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).deleteJobInputs(anyLong(), any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return;
  }

  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example #18
Source File: LinkDynamicConfigOptions.java    From sqoop-on-spark with Apache License 2.0
@SuppressWarnings("static-access")
@Override
public void prepareOptions(MLink link) {
  this.addOption(OptionBuilder.withLongOpt("name").hasArg().create());
  for (Option option : ConfigOptions.getConfigsOptions("link", link.getConnectorLinkConfig()
      .getConfigs())) {
    this.addOption(option);
  }
}
 
Example #19
Source File: JobManager.java    From sqoop-on-spark with Apache License 2.0
MLink getLink(long linkId) {
  MLink link = RepositoryManager.getInstance().getRepository()
      .findLink(linkId);
  if (!link.getEnabled()) {
    throw new SqoopException(DriverError.DRIVER_0010, "Connection id: "
        + link.getPersistenceId());
  }
  return link;
}
 
Example #20
Source File: OutputDirectoryTest.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testOutputDirectoryIsAFile() throws Exception {
  createAndLoadTableCities();

  hdfsClient.createNewFile(new Path(getHadoopTestDirectory()));

  // RDBMS link
  MLink rdbmsConnection = getClient().createLink("generic-jdbc-connector");
  fillRdbmsLinkConfig(rdbmsConnection);
  saveLink(rdbmsConnection);

  // HDFS link
  MLink hdfsConnection = getClient().createLink("hdfs-connector");
  fillHdfsLink(hdfsConnection);
  saveLink(hdfsConnection);

  // Job creation
  MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());

  // Set rdbms "FROM" config
  fillRdbmsFromConfig(job, "id");

  // fill the hdfs "TO" config
  fillHdfsToConfig(job, ToFormat.TEXT_FILE);

  saveJob(job);

  assertJobSubmissionFailure(job,
    HdfsConnectorError.GENERIC_HDFS_CONNECTOR_0007.toString(),
    "is a file"
  );

  dropTable();
}
 
Example #21
Source File: TomcatSqoopRunner.java    From incubator-sentry with Apache License 2.0
/**
 * Fill link config based on currently active provider.
 *
 * @param link MLink object to fill
 */
public void fillRdbmsLinkConfig(MLink link) {
  MConfigList configs = link.getConnectorLinkConfig();
  configs.getStringInput("linkConfig.jdbcDriver").setValue(provider.getJdbcDriver());
  configs.getStringInput("linkConfig.connectionString").setValue(provider.getConnectionUrl());
  configs.getStringInput("linkConfig.username").setValue(provider.getConnectionUsername());
  configs.getStringInput("linkConfig.password").setValue(provider.getConnectionPassword());
}
 
Example #22
Source File: TestJobManager.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testGetLink() {
  MLink testLink = new MLink(123L, null);
  testLink.setEnabled(true);
  MLink mConnectionSpy = org.mockito.Mockito.spy(testLink);
  when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
  when(jdbcRepoMock.findLink(123L)).thenReturn(mConnectionSpy);
  assertEquals(jobManager.getLink(123L), mConnectionSpy);
  verify(repositoryManagerMock, times(1)).getRepository();
  verify(jdbcRepoMock, times(1)).findLink(123L);
}
 
Example #23
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the exception handling procedure when the database handler fails to
 * find jobs for a given connector
 */
@Test
public void testConnectorConfigUpgradeHandlerWithFindJobsForConnectorError() {
  MConnector newConnector = connector(1, "1.1");
  MConnector oldConnector = connector(1);

  SqoopConnector sqconnector = mock(SqoopConnector.class);
  when(sqconnector.getConfigurableUpgrader()).thenReturn(connectorUpgraderMock);
  when(connectorMgrMock.getSqoopConnector(anyString())).thenReturn(sqconnector);

  List<MLink> linkList = links(link(1,1), link(2,1));
  doReturn(linkList).when(repoHandlerMock).findLinksForConnector(anyLong(), any(Connection.class));

  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "find jobs for connector error.");
  doThrow(exception).when(repoHandlerMock).findJobsForConnector(anyLong(), any(Connection.class));

  try {
    repoSpy.upgradeConnector(oldConnector, newConnector);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findLinksForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).findJobsForConnector(anyLong(), any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return;
  }

  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example #24
Source File: RepositoryDumpTool.java    From sqoop-on-spark with Apache License 2.0
private JSONObject dump(boolean skipSensitive) {

    RepositoryManager.getInstance().initialize(true);
    ConnectorManager.getInstance().initialize();

    Repository repository = RepositoryManager.getInstance().getRepository();


    JSONObject result = new JSONObject();

    LOG.info("Dumping Links with skipSensitive=" + String.valueOf(skipSensitive));
    List<MLink> links = repository.findLinks();
    LinksBean linkBeans = new LinksBean(links);
    JSONObject linksJsonObject = linkBeans.extract(skipSensitive);
    JSONArray linksJsonArray = (JSONArray)linksJsonObject.get(JSONConstants.LINKS);
    addConnectorName(linksJsonArray, JSONConstants.CONNECTOR_ID);
    result.put(JSONConstants.LINKS, linksJsonObject);

    LOG.info("Dumping Jobs with skipSensitive=" + String.valueOf(skipSensitive));
    JobsBean jobs = new JobsBean(repository.findJobs());
    JSONObject jobsJsonObject = jobs.extract(skipSensitive);
    JSONArray jobsJsonArray = (JSONArray)jobsJsonObject.get(JSONConstants.JOBS);
    addConnectorName(jobsJsonArray, JSONConstants.FROM_CONNECTOR_ID);
    addConnectorName(jobsJsonArray, JSONConstants.TO_CONNECTOR_ID);
    result.put(JSONConstants.JOBS, jobsJsonObject);

    LOG.info("Dumping Submissions with skipSensitive=" + String.valueOf(skipSensitive));
    SubmissionsBean submissions = new SubmissionsBean(repository.findSubmissions());
    JSONObject submissionsJsonObject = submissions.extract(skipSensitive);
    result.put(JSONConstants.SUBMISSIONS, submissionsJsonObject);

    result.put(JSONConstants.METADATA, repoMetadata(skipSensitive));

    return result;
  }
 
Example #25
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void updateLink(MLink link, Connection conn) {
  PreparedStatement deleteStmt = null;
  PreparedStatement updateStmt = null;
  try {
    // Firstly remove old values
    deleteStmt = conn.prepareStatement(crudQueries.getStmtDeleteLinkInput());
    deleteStmt.setLong(1, link.getPersistenceId());
    deleteStmt.executeUpdate();

    // Update LINK_CONFIG table
    updateStmt = conn.prepareStatement(crudQueries.getStmtUpdateLink());
    updateStmt.setString(1, link.getName());
    updateStmt.setString(2, link.getLastUpdateUser());
    updateStmt.setTimestamp(3, new Timestamp(new Date().getTime()));

    updateStmt.setLong(4, link.getPersistenceId());
    updateStmt.executeUpdate();

    // And reinsert new values
    createInputValues(crudQueries.getStmtInsertLinkInput(),
        link.getPersistenceId(),
        link.getConnectorLinkConfig().getConfigs(),
        conn);

  } catch (SQLException ex) {
    logException(ex, link);
    throw new SqoopException(CommonRepositoryError.COMMON_0018, ex);
  } finally {
    closeStatements(deleteStmt, updateStmt);
  }
}
 
Example #26
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the exception handling procedure when the database handler fails to
 * delete link inputs for a given connector
 */
@Test
public void testConnectorConfigUpgradeHandlerWithDeleteLinkInputsError() {
  MConnector newConnector = connector(1, "1.1");
  MConnector oldConnector = connector(1);

  SqoopConnector sqconnector = mock(SqoopConnector.class);
  when(sqconnector.getConfigurableUpgrader()).thenReturn(connectorUpgraderMock);
  when(connectorMgrMock.getSqoopConnector(anyString())).thenReturn(sqconnector);

  List<MLink> linkList = links(link(1,1), link(2,1));
  List<MJob> jobList = jobs(job(1,1,1,1,1), job(2,1,1,2,1));
  doReturn(linkList).when(repoHandlerMock).findLinksForConnector(anyLong(), any(Connection.class));
  doReturn(jobList).when(repoHandlerMock).findJobsForConnector(anyLong(), any(Connection.class));
  doNothing().when(repoHandlerMock).deleteJobInputs(anyLong(), any(Connection.class));

  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "delete link inputs for connector error.");
  doThrow(exception).when(repoHandlerMock).deleteLinkInputs(anyLong(), any(Connection.class));

  try {
    repoSpy.upgradeConnector(oldConnector, newConnector);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findLinksForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).findJobsForConnector(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(2)).deleteJobInputs(anyLong(), any(Connection.class));
    verify(repoHandlerMock, times(1)).deleteLinkInputs(anyLong(), any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return;
  }

  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example #27
Source File: LinkBean.java    From sqoop-on-spark with Apache License 2.0
@SuppressWarnings("unchecked")
private JSONObject extractLink(boolean skipSensitive, MLink link) {
  JSONObject linkJsonObject = new JSONObject();
  linkJsonObject.put(ID, link.getPersistenceId());
  linkJsonObject.put(NAME, link.getName());
  linkJsonObject.put(ENABLED, link.getEnabled());
  linkJsonObject.put(CREATION_USER, link.getCreationUser());
  linkJsonObject.put(CREATION_DATE, link.getCreationDate().getTime());
  linkJsonObject.put(UPDATE_USER, link.getLastUpdateUser());
  linkJsonObject.put(UPDATE_DATE, link.getLastUpdateDate().getTime());
  linkJsonObject.put(CONNECTOR_ID, link.getConnectorId());
  linkJsonObject.put(LINK_CONFIG_VALUES,
    extractConfigList(link.getConnectorLinkConfig().getConfigs(), link.getConnectorLinkConfig().getType(), skipSensitive));
  return linkJsonObject;
}
 
Example #28
Source File: LinkBean.java    From sqoop-on-spark with Apache License 2.0
@SuppressWarnings("unchecked")
protected JSONArray extractLinks(boolean skipSensitive) {
  JSONArray linkArray = new JSONArray();
  for (MLink link : links) {
    linkArray.add(extractLink(skipSensitive, link));
  }
  return linkArray;
}
 
Example #29
Source File: LinkResourceRequest.java    From sqoop-on-spark with Apache License 2.0
public ValidationResultBean create(String serverUrl, MLink link) {
  LinkBean linkBean = new LinkBean(link);
  // Extract all config inputs including sensitive inputs
  JSONObject linkJson = linkBean.extract(false);
  String response = super.post(serverUrl + LINK_RESOURCE, linkJson.toJSONString());
  ValidationResultBean validationBean = new ValidationResultBean();
  validationBean.restore(JSONUtils.parse(response));
  return validationBean;
}
 
Example #30
Source File: LinkResourceRequest.java    From sqoop-on-spark with Apache License 2.0
public ValidationResultBean update(String serverUrl, MLink link) {
  LinkBean linkBean = new LinkBean(link);
  // Extract all config inputs including sensitive inputs
  JSONObject linkJson = linkBean.extract(false);
  String response = super.put(serverUrl + LINK_RESOURCE + link.getPersistenceId(), linkJson.toJSONString());
  ValidationResultBean validationBean = new ValidationResultBean();
  validationBean.restore(JSONUtils.parse(response));
  return validationBean;
}