Java Code Examples for org.apache.sqoop.model.MLink#getPersistenceId()

The following examples show how to use org.apache.sqoop.model.MLink#getPersistenceId(). All of them are drawn from the sqoop-on-spark project; the source file and license for each example are noted above its code.
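Before the project-specific examples, here is a minimal sketch of the typical lifecycle around getPersistenceId(): a freshly constructed MLink has not been persisted, and the repository assigns its id when the link is stored. This sketch is illustrative only; it assumes a running Sqoop 2 repository with the generic-jdbc-connector registered, and the method and link names are made up. It mirrors the pattern used in Examples 5 and 6.

import org.apache.sqoop.model.MConnector;
import org.apache.sqoop.model.MLink;
import org.apache.sqoop.repository.RepositoryManager;

// Minimal sketch (assumptions: the Sqoop 2 repository is initialized and the
// generic-jdbc-connector is registered, as in Examples 5 and 6 below).
static long createLinkAndGetId() {
  MConnector connector = RepositoryManager.getInstance().getRepository()
      .findConnector("generic-jdbc-connector");

  // A new MLink carries no repository id yet; getPersistenceId() still returns
  // the default value (see MPersistableEntity.PERSISTANCE_ID_DEFAULT in Example 7).
  MLink link = new MLink(connector.getPersistenceId(), connector.getLinkConfig());
  link.setName("example-link-" + System.currentTimeMillis());

  // createLink() stores the link and assigns its repository-generated id,
  // which getPersistenceId() then returns (e.g. for MJob constructors or REST URLs).
  RepositoryManager.getInstance().getRepository().createLink(link);
  return link.getPersistenceId();
}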
Example 1
Source File: TestJobManager.java    From sqoop-on-spark with Apache License 2.0
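This unit test exercises JobManager.getLink() with a disabled link: getPersistenceId() supplies the connection id embedded in the expected SqoopException message.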
@Test
public void testDisabledLink() {
  MLink testConnection = new MLink(123L, null);
  testConnection.setPersistenceId(1234);
  testConnection.setEnabled(false);
  SqoopException exception = new SqoopException(DriverError.DRIVER_0010, "Connection id: "
      + testConnection.getPersistenceId());

  MLink mConnectionSpy = org.mockito.Mockito.spy(testConnection);
  when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
  when(jdbcRepoMock.findLink(123L)).thenReturn(mConnectionSpy);
  try {
    jobManager.getLink(123L);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repositoryManagerMock, times(1)).getRepository();
    verify(jdbcRepoMock, times(1)).findLink(123L);
  }
}
 
Example 2
Source File: LinkResourceRequest.java    From sqoop-on-spark with Apache License 2.0
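The client-side request class appends link.getPersistenceId() to the link resource URL when issuing the update (PUT) call.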
public ValidationResultBean update(String serverUrl, MLink link) {
  LinkBean linkBean = new LinkBean(link);
  // Extract all config inputs including sensitive inputs
  JSONObject linkJson = linkBean.extract(false);
  String response = super.put(serverUrl + LINK_RESOURCE + link.getPersistenceId(), linkJson.toJSONString());
  ValidationResultBean validationBean = new ValidationResultBean();
  validationBean.restore(JSONUtils.parse(response));
  return validationBean;
}
 
Example 3
Source File: RepositoryLoadTool.java    From sqoop-on-spark with Apache License 2.0
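When reloading a repository dump, the tool resets the link's persistence id, upgrades and validates its configs, re-creates the link, and returns the id assigned by the repository.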
private long loadLink(MLink link) {

    // starting by pretending we have a brand new link
    resetPersistenceId(link);

    Repository repository = RepositoryManager.getInstance().getRepository();

    MConnector mConnector = ConnectorManager.getInstance().getConnectorConfigurable(link.getConnectorId());
    ConnectorConfigurableUpgrader connectorConfigUpgrader = ConnectorManager.getInstance().getSqoopConnector(mConnector.getUniqueName()).getConfigurableUpgrader();

    List<MConfig> connectorConfigs = mConnector.getLinkConfig().clone(false).getConfigs();
    MLinkConfig newLinkConfigs = new MLinkConfig(connectorConfigs);

    // upgrading the configs to make sure they match the current repository
    connectorConfigUpgrader.upgradeLinkConfig(link.getConnectorLinkConfig(), newLinkConfigs);
    MLink newLink = new MLink(link, newLinkConfigs);

    // Transform config structures to objects for validations
    SqoopConnector connector = ConnectorManager.getInstance().getSqoopConnector(
        link.getConnectorId());

    Object connectorConfig = ClassUtils.instantiate(connector.getLinkConfigurationClass());

    ConfigUtils.fromConfigs(link.getConnectorLinkConfig().getConfigs(), connectorConfig);

    ConfigValidationRunner validationRunner = new ConfigValidationRunner();
    ConfigValidationResult result = validationRunner.validate(connectorConfig);

    Status finalStatus = Status.getWorstStatus(result.getStatus());

    if (finalStatus.canProceed()) {
      repository.createLink(newLink);

    } else {
      LOG.error("Failed to load link:" + link.getName());
      LOG.error("Status of connector configs:" + result.getStatus().toString());
    }
    return newLink.getPersistenceId();
  }
 
Example 4
Source File: JobManager.java    From sqoop-on-spark with Apache License 2.0
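The server-side lookup rejects disabled links, using getPersistenceId() to identify the offending connection in the exception message.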
MLink getLink(long linkId) {
  MLink link = RepositoryManager.getInstance().getRepository()
      .findLink(linkId);
  if (!link.getEnabled()) {
    throw new SqoopException(DriverError.DRIVER_0010, "Connection id: "
        + link.getPersistenceId());
  }
  return link;
}
 
Example 5
Source File: SqoopJDBCKafkaJob.java    From sqoop-on-spark with Apache License 2.0
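This Spark driver builds a JDBC-to-Kafka job: after creating the from and to links, their persistence ids (along with the connector ids) are passed to the MJob constructor.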
public static void main(String[] args) throws Exception {

    final SqoopSparkJob sparkJob = new SqoopSparkJob();
    CommandLine cArgs = SqoopSparkJob.parseArgs(createOptions(), args);
    SparkConf conf = sparkJob.init(cArgs);
    JavaSparkContext context = new JavaSparkContext(conf);

    MConnector fromConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("generic-jdbc-connector");

    MLinkConfig fromLinkConfig = fromConnector.getLinkConfig();
    MLink fromLink = new MLink(fromConnector.getPersistenceId(), fromLinkConfig);
    fromLink.setName("jdbcLink-" + System.currentTimeMillis());

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.jdbcDriver")
        .setValue("com.mysql.jdbc.Driver");

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.connectionString")
        .setValue(cArgs.getOptionValue("jdbcString"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.username")
        .setValue(cArgs.getOptionValue("u"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.password")
        .setValue(cArgs.getOptionValue("p"));

    RepositoryManager.getInstance().getRepository().createLink(fromLink);

    MConnector toConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("kafka-connector");

    MLinkConfig toLinkConfig = toConnector.getLinkConfig();

    MLink toLink = new MLink(toConnector.getPersistenceId(), toLinkConfig);
    toLink.setName("kafkaLink-" + System.currentTimeMillis());

    toLink.getConnectorLinkConfig().getStringInput("linkConfig.brokerList")
        .setValue(cArgs.getOptionValue("broker"));
    toLink.getConnectorLinkConfig().getStringInput("linkConfig.zookeeperConnect")
        .setValue(cArgs.getOptionValue("zk"));

    RepositoryManager.getInstance().getRepository().createLink(toLink);

    MFromConfig fromJobConfig = fromConnector.getFromConfig();
    MToConfig toJobConfig = toConnector.getToConfig();

    MJob sqoopJob = new MJob(fromConnector.getPersistenceId(), toConnector.getPersistenceId(),
        fromLink.getPersistenceId(), toLink.getPersistenceId(), fromJobConfig, toJobConfig, Driver
            .getInstance().getDriver().getDriverConfig());
    // jdbc configs
    MFromConfig fromConfig = sqoopJob.getFromJobConfig();
    fromConfig.getStringInput("fromJobConfig.tableName").setValue(cArgs.getOptionValue("table"));
    fromConfig.getStringInput("fromJobConfig.partitionColumn").setValue(cArgs.getOptionValue("partitionCol"));
    // kafka configs
    MToConfig toConfig = sqoopJob.getToJobConfig();
    toConfig.getStringInput("toJobConfig.topic").setValue("test-spark-topic");

    MDriverConfig driverConfig = sqoopJob.getDriverConfig();
    if (cArgs.getOptionValue("numE") != null) {
      driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(
          Integer.valueOf(cArgs.getOptionValue("numE")));
    }
    if (cArgs.getOptionValue("numL") != null) {

      driverConfig.getIntegerInput("throttlingConfig.numLoaders").setValue(
          Integer.valueOf(cArgs.getOptionValue("numL")));
    }    RepositoryManager.getInstance().getRepository().createJob(sqoopJob);
    sparkJob.setJob(sqoopJob);
    sparkJob.execute(conf, context);
  }
 
Example 6
Source File: SqoopJDBCHDFSJob.java    From sqoop-on-spark with Apache License 2.0
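Analogous to Example 5, this driver builds a JDBC-to-HDFS job and identifies the newly created links in the MJob definition by their persistence ids.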
public static void main(String[] args) throws Exception {

    final SqoopSparkJob sparkJob = new SqoopSparkJob();
    CommandLine cArgs = SqoopSparkJob.parseArgs(createOptions(), args);
    SparkConf conf = sparkJob.init(cArgs);
    JavaSparkContext context = new JavaSparkContext(conf);

    MConnector fromConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("generic-jdbc-connector");
    MConnector toConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("hdfs-connector");

    MLinkConfig fromLinkConfig = fromConnector.getLinkConfig();
    MLinkConfig toLinkConfig = toConnector.getLinkConfig();

    MLink fromLink = new MLink(fromConnector.getPersistenceId(), fromLinkConfig);
    fromLink.setName("jdbcLink-" + System.currentTimeMillis());
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.jdbcDriver")
        .setValue("com.mysql.jdbc.Driver");

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.connectionString")
        .setValue(cArgs.getOptionValue("jdbcString"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.username")
        .setValue(cArgs.getOptionValue("u"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.password")
        .setValue(cArgs.getOptionValue("p"));
    RepositoryManager.getInstance().getRepository().createLink(fromLink);

    MLink toLink = new MLink(toConnector.getPersistenceId(), toLinkConfig);
    toLink.setName("hdfsLink-" + System.currentTimeMillis());
    toLink.getConnectorLinkConfig().getStringInput("linkConfig.confDir")
        .setValue(cArgs.getOptionValue("outputDir"));
    RepositoryManager.getInstance().getRepository().createLink(toLink);

    MFromConfig fromJobConfig = fromConnector.getFromConfig();
    MToConfig toJobConfig = toConnector.getToConfig();

    MJob sqoopJob = new MJob(fromConnector.getPersistenceId(), toConnector.getPersistenceId(),
        fromLink.getPersistenceId(), toLink.getPersistenceId(), fromJobConfig, toJobConfig, Driver
            .getInstance().getDriver().getDriverConfig());

    MConfigList fromConfig = sqoopJob.getJobConfig(Direction.FROM);
    fromConfig.getStringInput("fromJobConfig.tableName").setValue(cArgs.getOptionValue("table"));
    fromConfig.getStringInput("fromJobConfig.partitionColumn").setValue(
        cArgs.getOptionValue("paritionCol"));

    MToConfig toConfig = sqoopJob.getToJobConfig();
    toConfig.getStringInput("toJobConfig.outputDirectory").setValue(
        cArgs.getOptionValue("outputDir") + System.currentTimeMillis());
    MDriverConfig driverConfig = sqoopJob.getDriverConfig();
    if (cArgs.getOptionValue("numE") != null) {
      driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(
          Integer.valueOf(cArgs.getOptionValue("numE")));
    }
    if (cArgs.getOptionValue("numL") != null) {

      driverConfig.getIntegerInput("throttlingConfig.numLoaders").setValue(
          Integer.valueOf(cArgs.getOptionValue("numL")));
    }
    RepositoryManager.getInstance().getRepository().createJob(sqoopJob);
    sparkJob.setJob(sqoopJob);
    sparkJob.execute(conf, context);
  }
 
Example 7
Source File: LinkRequestHandler.java    From sqoop-on-spark with Apache License 2.0
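This REST handler creates or updates a link; getPersistenceId() feeds authorization checks and audit log entries, and on update it is backfilled from the existing link when the posted link still carries the default id.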
/**
 * Create or update a link in the repository.
 *
 * @param ctx Context object
 * @param create True to create a new link, false to update an existing one
 * @return Validation bean object
 */
private JsonBean createUpdateLink(RequestContext ctx, boolean create) {

  Repository repository = RepositoryManager.getInstance().getRepository();

  LinkBean linkBean = new LinkBean();
  try {
    JSONObject postData = JSONUtils.parse(ctx.getRequest().getReader());
    linkBean.restore(postData);
  } catch (IOException e) {
    throw new SqoopException(ServerError.SERVER_0003, "Can't read request content", e);
  }

  String username = ctx.getUserName();

  // Get link object
  List<MLink> links = linkBean.getLinks();
  if (links.size() != 1) {
    throw new SqoopException(ServerError.SERVER_0003,
        "Expected one link while parsing JSON request but got " + links.size());
  }

  MLink postedLink = links.get(0);

  // Authorization check
  if (create) {
    AuthorizationEngine.createLink(String.valueOf(postedLink.getConnectorId()));
  } else {
    AuthorizationEngine.updateLink(String.valueOf(postedLink.getConnectorId()),
            String.valueOf(postedLink.getPersistenceId()));
  }

  MLinkConfig linkConfig = ConnectorManager.getInstance()
      .getConnectorConfigurable(postedLink.getConnectorId()).getLinkConfig();
  if (!linkConfig.equals(postedLink.getConnectorLinkConfig())) {
    throw new SqoopException(ServerError.SERVER_0003, "Detected incorrect link config structure");
  }
  // If updating, resolve the link id from the request URI
  if (!create) {
    String linkIdentifier = ctx.getLastURLElement();
    // support linkName or linkId for the api
    long linkId = HandlerUtils.getLinkIdFromIdentifier(linkIdentifier, repository);
    if (postedLink.getPersistenceId() == MPersistableEntity.PERSISTANCE_ID_DEFAULT) {
      MLink existingLink = repository.findLink(linkId);
      postedLink.setPersistenceId(existingLink.getPersistenceId());
    }
  }
  // Associated connector for this link
  SqoopConnector connector = ConnectorManager.getInstance().getSqoopConnector(
      postedLink.getConnectorId());

  // Validate user supplied config data
  ConfigValidationResult connectorLinkConfigValidation = ConfigUtils.validateConfigs(postedLink
      .getConnectorLinkConfig().getConfigs(), connector.getLinkConfigurationClass());
  // Return back link validation result bean
  ValidationResultBean linkValidationBean = new ValidationResultBean(
      connectorLinkConfigValidation);

  // If we're good enough let's perform the action
  if (connectorLinkConfigValidation.getStatus().canProceed()) {
    if (create) {
      AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
          ctx.getRequest().getRemoteAddr(), "create", "link",
          String.valueOf(postedLink.getPersistenceId()));
      postedLink.setCreationUser(username);
      postedLink.setLastUpdateUser(username);
      repository.createLink(postedLink);
      linkValidationBean.setId(postedLink.getPersistenceId());
    } else {
      AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
          ctx.getRequest().getRemoteAddr(), "update", "link",
          String.valueOf(postedLink.getPersistenceId()));
      postedLink.setLastUpdateUser(username);
      repository.updateLink(postedLink);
    }
  }

  return linkValidationBean;
}