Java Code Examples for org.apache.sqoop.model.MLink#setName()

The following examples show how to use org.apache.sqoop.model.MLink#setName(). They are drawn from open source projects; the source file and originating project are noted above each example.
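As a quick orientation before the full examples, here is a minimal sketch of the call itself. The connector id and the empty config list are placeholders rather than values from any of the projects below; a real link is normally built from a connector's MLinkConfig, as Examples 6 and 7 show.

// Minimal sketch: construct a link and give it a human-readable name.
// The connector id (1L) and the empty config list are illustrative only.
long connectorId = 1L;
MLink link = new MLink(connectorId, new MLinkConfig(new ArrayList<MConfig>()));
link.setName("my-jdbc-link-" + System.currentTimeMillis());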
Example 1
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test(expectedExceptions = SqoopException.class)
public void testCreateDuplicateLink() throws Exception {
  MLink link = getLink();
  fillLink(link);
  link.setName("test");
  handler.createLink(link, getDerbyDatabaseConnection());
  assertEquals(1, link.getPersistenceId());

  link.setPersistenceId(MLink.PERSISTANCE_ID_DEFAULT);
  handler.createLink(link, getDerbyDatabaseConnection());
}
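Because the repository treats link names as unique identifiers, the second createLink(...) call above, which reuses the name "test" after resetting the persistence id to its default, is expected to fail with a SqoopException.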
 
Example 2
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testUpdateLink() throws Exception {
  loadLinksForLatestVersion();

  MLink link = handler.findLink(1, getDerbyDatabaseConnection());

  List<MConfig> configs;

  configs = link.getConnectorLinkConfig().getConfigs();
  ((MStringInput) configs.get(0).getInputs().get(0)).setValue("Updated");
  ((MMapInput) configs.get(0).getInputs().get(1)).setValue(null);
  ((MStringInput) configs.get(1).getInputs().get(0)).setValue("Updated");
  ((MMapInput) configs.get(1).getInputs().get(1)).setValue(null);

  link.setName("name");

  handler.updateLink(link, getDerbyDatabaseConnection());

  assertEquals(1, link.getPersistenceId());
  assertCountForTable("SQOOP.SQ_LINK", 2);
  assertCountForTable("SQOOP.SQ_LINK_INPUT", 6);

  MLink retrieved = handler.findLink(1, getDerbyDatabaseConnection());
  assertEquals("name", link.getName());

  configs = retrieved.getConnectorLinkConfig().getConfigs();
  assertEquals("Updated", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Updated", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());
}
 
Example 3
Source File: TestLinkHandling.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testUpdateLink() throws Exception {
  MLink link = handler.findLink(1, provider.getConnection());

  List<MConfig> configs;

  configs = link.getConnectorLinkConfig().getConfigs();
  ((MStringInput) configs.get(0).getInputs().get(0)).setValue("Updated");
  ((MMapInput) configs.get(0).getInputs().get(1)).setValue(null);
  ((MStringInput) configs.get(1).getInputs().get(0)).setValue("Updated");
  ((MMapInput) configs.get(1).getInputs().get(1)).setValue(null);

  link.setName("name");

  handler.updateLink(link, provider.getConnection());

  assertEquals(1, link.getPersistenceId());
  Assert.assertEquals(provider.rowCount(new TableName("SQOOP", "SQ_LINK")), 2);
  Assert.assertEquals(provider.rowCount(new TableName("SQOOP", "SQ_LINK_INPUT")), 4);

  MLink retrieved = handler.findLink(1, provider.getConnection());
  assertEquals("name", link.getName());

  configs = retrieved.getConnectorLinkConfig().getConfigs();
  assertEquals("Updated", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Updated", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());
}
 
Example 4
Source File: LinkBean.java    From sqoop-on-spark with Apache License 2.0
private MLink restoreLink(Object obj) {
  JSONObject object = (JSONObject) obj;
  long connectorId = (Long) object.get(CONNECTOR_ID);
  JSONArray connectorLinkConfig = (JSONArray) object.get(LINK_CONFIG_VALUES);
  List<MConfig> linkConfig = restoreConfigList(connectorLinkConfig);
  MLink link = new MLink(connectorId, new MLinkConfig(linkConfig));
  link.setPersistenceId((Long) object.get(ID));
  link.setName((String) object.get(NAME));
  link.setEnabled((Boolean) object.get(ENABLED));
  link.setCreationUser((String) object.get(CREATION_USER));
  link.setCreationDate(new Date((Long) object.get(CREATION_DATE)));
  link.setLastUpdateUser((String) object.get(UPDATE_USER));
  link.setLastUpdateDate(new Date((Long) object.get(UPDATE_DATE)));
  return link;
}
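For orientation, the JSON shape this method expects can be sketched as follows. This is a hedged illustration, not code from the project: it reuses the key constants referenced above, assumes json-simple types throughout, and would have to live inside LinkBean, since restoreLink() is private; all concrete values are made up.

// Hypothetical input for restoreLink(); keys are LinkBean's constants, values are illustrative.
JSONObject object = new JSONObject();
object.put(ID, 1L);
object.put(NAME, "my-link");
object.put(ENABLED, Boolean.TRUE);
object.put(CONNECTOR_ID, 1L);
object.put(LINK_CONFIG_VALUES, new JSONArray()); // no config values in this sketch
object.put(CREATION_USER, "admin");
object.put(CREATION_DATE, System.currentTimeMillis());
object.put(UPDATE_USER, "admin");
object.put(UPDATE_DATE, System.currentTimeMillis());
MLink link = restoreLink(object);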
 
Example 5
Source File: BeanTestUtil.java    From sqoop-on-spark with Apache License 2.0
public static MLink createLink(String connectorName, String linkName, Long linkId, Date created,
    Date updated) {
  MLink link1 = getLink(connectorName);
  link1.setName(linkName);
  link1.setPersistenceId(linkId);
  link1.setCreationUser("admin");
  link1.setCreationDate(created);
  link1.setLastUpdateUser("user");
  link1.setLastUpdateDate(updated);
  link1.setEnabled(false);
  return link1;
}
 
Example 6
Source File: SqoopJDBCKafkaJob.java    From sqoop-on-spark with Apache License 2.0
public static void main(String[] args) throws Exception {

    final SqoopSparkJob sparkJob = new SqoopSparkJob();
    CommandLine cArgs = SqoopSparkJob.parseArgs(createOptions(), args);
    SparkConf conf = sparkJob.init(cArgs);
    JavaSparkContext context = new JavaSparkContext(conf);

    MConnector fromConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("generic-jdbc-connector");

    MLinkConfig fromLinkConfig = fromConnector.getLinkConfig();
    MLink fromLink = new MLink(fromConnector.getPersistenceId(), fromLinkConfig);
    fromLink.setName("jdbcLink-" + System.currentTimeMillis());

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.jdbcDriver")
        .setValue("com.mysql.jdbc.Driver");

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.connectionString")
        .setValue(cArgs.getOptionValue("jdbcString"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.username")
        .setValue(cArgs.getOptionValue("u"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.password")
        .setValue(cArgs.getOptionValue("p"));

    RepositoryManager.getInstance().getRepository().createLink(fromLink);

    MConnector toConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("kafka-connector");

    MLinkConfig toLinkConfig = toConnector.getLinkConfig();

    MLink toLink = new MLink(toConnector.getPersistenceId(), toLinkConfig);
    toLink.setName("kafkaLink-" + System.currentTimeMillis());

    toLink.getConnectorLinkConfig().getStringInput("linkConfig.brokerList")
        .setValue(cArgs.getOptionValue("broker"));
    toLink.getConnectorLinkConfig().getStringInput("linkConfig.zookeeperConnect")
        .setValue(cArgs.getOptionValue("zk"));

    RepositoryManager.getInstance().getRepository().createLink(toLink);

    MFromConfig fromJobConfig = fromConnector.getFromConfig();
    MToConfig toJobConfig = toConnector.getToConfig();

    MJob sqoopJob = new MJob(fromConnector.getPersistenceId(), toConnector.getPersistenceId(),
        fromLink.getPersistenceId(), toLink.getPersistenceId(), fromJobConfig, toJobConfig, Driver
            .getInstance().getDriver().getDriverConfig());
    // jdbc configs
    MFromConfig fromConfig = sqoopJob.getFromJobConfig();
    fromConfig.getStringInput("fromJobConfig.tableName").setValue(cArgs.getOptionValue("table"));
    fromConfig.getStringInput("fromJobConfig.partitionColumn").setValue(cArgs.getOptionValue("partitionCol"));
    // kafka configs
    MToConfig toConfig = sqoopJob.getToJobConfig();
    toConfig.getStringInput("toJobConfig.topic").setValue("test-spark-topic");

    MDriverConfig driverConfig = sqoopJob.getDriverConfig();
    if (cArgs.getOptionValue("numE") != null) {
      driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(
          Integer.valueOf(cArgs.getOptionValue("numE")));
    }
    if (cArgs.getOptionValue("numL") != null) {

      driverConfig.getIntegerInput("throttlingConfig.numLoaders").setValue(
          Integer.valueOf(cArgs.getOptionValue("numL")));
    }    RepositoryManager.getInstance().getRepository().createJob(sqoopJob);
    sparkJob.setJob(sqoopJob);
    sparkJob.execute(conf, context);
  }
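Note the overall flow of this example: persist a FROM link, persist a TO link, then assemble an MJob that references both by persistence id and hand it to the Spark job. The next example repeats the same pattern with an HDFS target in place of Kafka.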
 
Example 7
Source File: SqoopJDBCHDFSJob.java    From sqoop-on-spark with Apache License 2.0
public static void main(String[] args) throws Exception {

    final SqoopSparkJob sparkJob = new SqoopSparkJob();
    CommandLine cArgs = SqoopSparkJob.parseArgs(createOptions(), args);
    SparkConf conf = sparkJob.init(cArgs);
    JavaSparkContext context = new JavaSparkContext(conf);

    MConnector fromConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("generic-jdbc-connector");
    MConnector toConnector = RepositoryManager.getInstance().getRepository()
        .findConnector("hdfs-connector");

    MLinkConfig fromLinkConfig = fromConnector.getLinkConfig();
    MLinkConfig toLinkConfig = toConnector.getLinkConfig();

    MLink fromLink = new MLink(fromConnector.getPersistenceId(), fromLinkConfig);
    fromLink.setName("jdbcLink-" + System.currentTimeMillis());
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.jdbcDriver")
        .setValue("com.mysql.jdbc.Driver");

    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.connectionString")
        .setValue(cArgs.getOptionValue("jdbcString"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.username")
        .setValue(cArgs.getOptionValue("u"));
    fromLink.getConnectorLinkConfig().getStringInput("linkConfig.password")
        .setValue(cArgs.getOptionValue("p"));
    RepositoryManager.getInstance().getRepository().createLink(fromLink);

    MLink toLink = new MLink(toConnector.getPersistenceId(), toLinkConfig);
    toLink.setName("hdfsLink-" + System.currentTimeMillis());
    toLink.getConnectorLinkConfig().getStringInput("linkConfig.confDir")
        .setValue(cArgs.getOptionValue("outputDir"));
    RepositoryManager.getInstance().getRepository().createLink(toLink);

    MFromConfig fromJobConfig = fromConnector.getFromConfig();
    MToConfig toJobConfig = toConnector.getToConfig();

    MJob sqoopJob = new MJob(fromConnector.getPersistenceId(), toConnector.getPersistenceId(),
        fromLink.getPersistenceId(), toLink.getPersistenceId(), fromJobConfig, toJobConfig, Driver
            .getInstance().getDriver().getDriverConfig());

    MConfigList fromConfig = sqoopJob.getJobConfig(Direction.FROM);
    fromConfig.getStringInput("fromJobConfig.tableName").setValue(cArgs.getOptionValue("table"));
    fromConfig.getStringInput("fromJobConfig.partitionColumn").setValue(
        cArgs.getOptionValue("paritionCol"));

    MToConfig toConfig = sqoopJob.getToJobConfig();
    toConfig.getStringInput("toJobConfig.outputDirectory").setValue(
        cArgs.getOptionValue("outputDir") + System.currentTimeMillis());
    MDriverConfig driverConfig = sqoopJob.getDriverConfig();
    if (cArgs.getOptionValue("numE") != null) {
      driverConfig.getIntegerInput("throttlingConfig.numExtractors").setValue(
          Integer.valueOf(cArgs.getOptionValue("numE")));
    }
    if (cArgs.getOptionValue("numL") != null) {

      driverConfig.getIntegerInput("throttlingConfig.numLoaders").setValue(
          Integer.valueOf(cArgs.getOptionValue("numL")));
    }
    RepositoryManager.getInstance().getRepository().createJob(sqoopJob);
    sparkJob.setJob(sqoopJob);
    sparkJob.execute(conf, context);
  }
 
Example 8
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
private List<MLink> loadLinks(PreparedStatement stmt,
                              Connection conn)
    throws SQLException {
  List<MLink> links = new ArrayList<MLink>();
  ResultSet rsConnection = null;
  PreparedStatement connectorConfigFetchStatement = null;
  PreparedStatement connectorConfigInputStatement = null;

  try {
    rsConnection = stmt.executeQuery();

    connectorConfigFetchStatement = conn.prepareStatement(crudQueries.getStmtSelectConfigForConfigurable());
    connectorConfigInputStatement = conn.prepareStatement(crudQueries.getStmtFetchLinkInput());

    while(rsConnection.next()) {
      long id = rsConnection.getLong(1);
      String name = rsConnection.getString(2);
      long connectorId = rsConnection.getLong(3);
      boolean enabled = rsConnection.getBoolean(4);
      String creationUser = rsConnection.getString(5);
      Date creationDate = rsConnection.getTimestamp(6);
      String updateUser = rsConnection.getString(7);
      Date lastUpdateDate = rsConnection.getTimestamp(8);

      connectorConfigFetchStatement.setLong(1, connectorId);
      connectorConfigInputStatement.setLong(1, id);
      connectorConfigInputStatement.setLong(3, id);

      List<MConfig> connectorLinkConfig = new ArrayList<MConfig>();
      List<MConfig> fromConfig = new ArrayList<MConfig>();
      List<MConfig> toConfig = new ArrayList<MConfig>();

      loadConnectorConfigs(connectorLinkConfig, fromConfig, toConfig, connectorConfigFetchStatement,
          connectorConfigInputStatement, 2, conn);
      MLink link = new MLink(connectorId, new MLinkConfig(connectorLinkConfig));

      link.setPersistenceId(id);
      link.setName(name);
      link.setCreationUser(creationUser);
      link.setCreationDate(creationDate);
      link.setLastUpdateUser(updateUser);
      link.setLastUpdateDate(lastUpdateDate);
      link.setEnabled(enabled);

      links.add(link);
    }
  } finally {
    closeResultSets(rsConnection);
    closeStatements(connectorConfigFetchStatement, connectorConfigInputStatement);
  }

  return links;
}
 
Example 9
Source File: TestOwnerPrivilege.java    From incubator-sentry with Apache License 2.0
@Test
public void testLinkOwner() throws Exception {
  // USER1 initially has no privilege on any Sqoop resource
  SqoopClient client = sqoopServerRunner.getSqoopClient(USER1);
  assertTrue(client.getConnectors().size() == 0);
  /**
   * ADMIN_USER grants the read privilege on all connectors to role ROLE1
   * ADMIN_USER grants role ROLE1 to group GROUP1
   */
  client = sqoopServerRunner.getSqoopClient(ADMIN_USER);
  MRole role1 = new MRole(ROLE1);
  MPrincipal group1 = new MPrincipal(GROUP1, MPrincipal.TYPE.GROUP);
  MResource allConnector = new MResource(SqoopActionConstant.ALL, MResource.TYPE.CONNECTOR);
  MPrivilege readPriv = new MPrivilege(allConnector, SqoopActionConstant.READ, false);
  client.createRole(role1);
  client.grantRole(Lists.newArrayList(role1), Lists.newArrayList(group1));
  client.grantPrivilege(Lists.newArrayList(new MPrincipal(role1.getName(), MPrincipal.TYPE.ROLE)),
      Lists.newArrayList(readPriv));

  // check USER1 has the read privilege on all connectors
  client = sqoopServerRunner.getSqoopClient(USER1);
  assertTrue(client.getConnectors().size() > 0);

  // USER1 creates a new HDFS link
  MLink hdfsLink = client.createLink("hdfs-connector");
  sqoopServerRunner.fillHdfsLink(hdfsLink);
  sqoopServerRunner.saveLink(client, hdfsLink);

  // USER1 is the owner of the HDFS link, so he can show and update it
  assertEquals(client.getLink(hdfsLink.getPersistenceId()), hdfsLink);

  // USER1 updates the name of the HDFS link
  hdfsLink.setName("HDFS_update1");
  sqoopServerRunner.updateLink(client, hdfsLink);

  // USER2 has no privilege on the HDFS link
  client = sqoopServerRunner.getSqoopClient(USER2);
  assertTrue(client.getLinks().size() == 0);

  // delete the HDFS link
  client = sqoopServerRunner.getSqoopClient(USER1);
  client.deleteLink(hdfsLink.getPersistenceId());
}
 
Example 10
Source File: TestOwnerPrivilege.java    From incubator-sentry with Apache License 2.0
@Test
public void testJobOwner() throws Exception {
  // USER3 initially has no privilege on any Sqoop resource
  SqoopClient client = sqoopServerRunner.getSqoopClient(USER3);
  assertTrue(client.getConnectors().size() == 0);
  /**
   * ADMIN_USER grants the read privilege on all connectors to role ROLE3
   * ADMIN_USER grants role ROLE3 to group GROUP3
   */
  client = sqoopServerRunner.getSqoopClient(ADMIN_USER);
  MRole role3 = new MRole(ROLE3);
  MPrincipal group3 = new MPrincipal(GROUP3, MPrincipal.TYPE.GROUP);
  MResource allConnector = new MResource(SqoopActionConstant.ALL, MResource.TYPE.CONNECTOR);
  MPrivilege readPriv = new MPrivilege(allConnector, SqoopActionConstant.READ, false);
  client.createRole(role3);
  client.grantRole(Lists.newArrayList(role3), Lists.newArrayList(group3));
  client.grantPrivilege(Lists.newArrayList(new MPrincipal(ROLE3, MPrincipal.TYPE.ROLE)),
      Lists.newArrayList(readPriv));

  // check USER3 has the read privilege on all connectors
  client = sqoopServerRunner.getSqoopClient(USER3);
  assertTrue(client.getConnectors().size() > 0);

  // USER3 creates two links: an HDFS link and an RDBMS link
  MLink rdbmsLink = client.createLink("generic-jdbc-connector");
  sqoopServerRunner.fillRdbmsLinkConfig(rdbmsLink);
  sqoopServerRunner.saveLink(client, rdbmsLink);

  MLink hdfsLink = client.createLink("hdfs-connector");
  sqoopServerRunner.fillHdfsLink(hdfsLink);
  sqoopServerRunner.saveLink(client, hdfsLink);

  // USER3 owns both links, so he can show and update them
  assertTrue(client.getLinks().size() == 2);
  hdfsLink.setName("HDFS_update2");
  client.updateLink(hdfsLink);
  rdbmsLink.setName("RDBM_update");
  client.updateLink(rdbmsLink);

  // USER3 creates a job: transfer data from HDFS to RDBMS
  MJob job1 = client.createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
  // set HDFS "FROM" config for the job, since the connector test case base class only has utilities for HDFS!
  sqoopServerRunner.fillHdfsFromConfig(job1);

  // set the RDBM "TO" config here
  sqoopServerRunner.fillRdbmsToConfig(job1);

  // create job
  sqoopServerRunner.saveJob(client, job1);

  /**
   *  USER3 is the owner of job1, so he can show and delete job1.
   *  USER4 has no privilege on job1.
   */
  client = sqoopServerRunner.getSqoopClient(USER4);
  assertTrue(client.getJobs().size() == 0);
  try {
    client.deleteJob(job1.getPersistenceId());
    fail("expected Authorization exception happend");
  } catch (Exception e) {
    assertCausedMessage(e, SecurityError.AUTH_0014.getMessage());
  }
  client = sqoopServerRunner.getSqoopClient(USER3);
  assertEquals(client.getJob(job1.getPersistenceId()), job1);
  client.deleteJob(job1.getPersistenceId());

  // delete the HDFS and RDBMS links
  client.deleteLink(hdfsLink.getPersistenceId());
  client.deleteLink(rdbmsLink.getPersistenceId());
}
 
Example 11
Source File: ConfigFiller.java    From sqoop-on-spark with Apache License 2.0
/**
 * Fill link object based on user input.
 *
 * @param reader Associated console reader object
 * @param link Link that the user is supposed to fill in
 * @param linkConfigBundle Connector resource bundle
 * @return True if we filled all inputs, false if the user stopped processing
 * @throws IOException
 */
public static boolean fillLinkWithBundle(ConsoleReader reader, MLink link, ResourceBundle linkConfigBundle)
    throws IOException {

  link.setName(getName(reader, link.getName()));
  return fillLinkConfigWithBundle(reader, link.getConnectorLinkConfig().getConfigs(), linkConfigBundle);
}
 
Example 12
Source File: ConfigFiller.java    From sqoop-on-spark with Apache License 2.0
/**
 * Fill link object based on CLI options.
 *
 * @param line Associated command line options
 * @param link Link that the user is supposed to fill in
 * @return True if we filled all inputs, false if the user stopped processing
 * @throws IOException
 */
public static boolean fillLink(CommandLine line, MLink link) throws IOException {

  link.setName(line.getOptionValue("name"));
  return fillLinkConfig(line, link.getConnectorLinkConfig().getConfigs());
}
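For completeness, here is a hedged sketch of how such a CommandLine might be produced with Apache Commons CLI before being handed to fillLink(). The option registration and parser choice are illustrative, not the shell's actual wiring, and args and link are assumed to be in scope.

// Illustrative wiring only: register a long "name" option and parse the arguments.
Options options = new Options();
options.addOption(null, "name", true, "Name of the link");
CommandLine line = new DefaultParser().parse(options, args);
boolean filled = fillLink(line, link);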