Java Code Examples for org.apache.sqoop.common.SqoopException

The following examples show how to use org.apache.sqoop.common.SqoopException. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: sqoop-on-spark   Source File: CommonRepositoryHandler.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Validate the overrides attribute of an input and return the list of input
 * names it overrides.
 *
 * Rules enforced (input names are unique within a config class):
 * Rule #1: no input may override itself.
 * Rule #2: a USER_ONLY input may override CONNECTOR_ONLY and ANY inputs,
 *          but must not override other USER_ONLY inputs.
 * Rule #3: CONNECTOR_ONLY and ANY inputs may override any other input.
 *
 * @param currentInput input whose overrides attribute is being validated
 * @param config       config object that owns the input
 * @return list of overridden input names, or null when none are declared
 */
private List<String> validateAndGetOverridesAttribute(MInput<?> currentInput, MConfig config) {

  // The overrides attribute is a comma separated list of input names.
  String overrides = currentInput.getOverrides();
  if (StringUtils.isEmpty(overrides)) {
    return null;
  }

  List<String> overriddenNames = new ArrayList<String>();
  for (String overriddenName : overrides.split("\\,")) {
    // Rule #1: an input can never override itself.
    if (overriddenName.equals(currentInput.getName())) {
      throw new SqoopException(CommonRepositoryError.COMMON_0046, "for input :"
          + currentInput.toString());
    }
    // Rule #2: USER_ONLY inputs must not override other USER_ONLY inputs.
    if (currentInput.getEditable().equals(InputEditable.USER_ONLY)
        && config.getUserOnlyEditableInputNames().contains(overriddenName)) {
      throw new SqoopException(CommonRepositoryError.COMMON_0045, "for input :"
          + currentInput.toString());
    }
    overriddenNames.add(overriddenName);
  }
  return overriddenNames;
}
 
Example 2
Source Project: sqoop-on-spark   Source File: SubmissionRequestHandler.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Handles submission requests. Only HTTP GET is supported: either the
 * submissions of a single job (when the job-name query parameter is set)
 * or all submissions in the system.
 */
@Override
public JsonBean handleEvent(RequestContext ctx) {

  // Submission endpoints only support GET requests; reject anything else up front.
  if (ctx.getMethod() != Method.GET) {
    throw new SqoopException(ServerError.SERVER_0002, "Unsupported HTTP method for connector:"
        + ctx.getMethod());
  }

  String jobIdentifier = ctx.getLastURLElement();
  Repository repository = RepositoryManager.getInstance().getRepository();
  String jobNameParam = ctx.getParameterValue(JOB_NAME_QUERY_PARAM);

  if (jobNameParam == null) {
    // No job filter: return every submission in the system.
    AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
        ctx.getRequest().getRemoteAddr(), "get", "submissions", "all");
    return getSubmissions();
  }

  // Submissions per job are ordered by update time, so the latest is on top.
  jobIdentifier = jobNameParam;
  AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
      ctx.getRequest().getRemoteAddr(), "get", "submissionsByJob", jobIdentifier);
  long jobId = HandlerUtils.getJobIdFromIdentifier(jobIdentifier, repository);
  return getSubmissionsForJob(jobId);
}
 
Example 3
Source Project: sqoop-on-spark   Source File: TestJobManager.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verify that fetching a disabled job raises DRIVER_0009 with the job id
 * in the message, after one repository lookup.
 */
@Test
public void testDisabledJob() {
  MJob testJob = job(123l, 456l);
  testJob.setEnabled(false);
  testJob.setPersistenceId(1111);
  SqoopException expected = new SqoopException(DriverError.DRIVER_0009, "Job id: "
      + testJob.getPersistenceId());

  MJob mJobSpy = org.mockito.Mockito.spy(testJob);
  when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
  when(jdbcRepoMock.findJob(123l)).thenReturn(mJobSpy);
  try {
    jobManager.getJob(123l);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), expected.getMessage());
    verify(repositoryManagerMock, times(1)).getRepository();
    verify(jdbcRepoMock, times(1)).findJob(123l);
    return;
  }
  // Bug fix: previously the test passed silently when no exception was thrown.
  throw new AssertionError("Expected SqoopException with message: " + expected.getMessage());
}
 
Example 4
Source Project: sqoop-on-spark   Source File: FileAuditLogger.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Initializes the file audit logger: reads the mandatory output file from
 * the logger config and programmatically wires a log4j RollingFileAppender
 * for this logger's name.
 */
public void initialize() {
  config = getLoggerConfig();

  String outputFile = config.get(FILE);
  if (outputFile == null) {
    // The audit log output file location is mandatory.
    throw new SqoopException(AuditLoggerError.AUDIT_0002);
  }

  // Build the log4j configuration keys for this logger's appender.
  String appenderKey = "log4j.appender." + getLoggerName() + APPENDER_SURFIX;
  LOG.warn("appender: " + appenderKey);
  props.put(appenderKey, "org.apache.log4j.RollingFileAppender");
  props.put(appenderKey + ".File", outputFile);
  props.put(appenderKey + ".layout", "org.apache.log4j.PatternLayout");
  props.put(appenderKey + ".layout.ConversionPattern", "%d %-5p %c: %m%n");
  props.put("log4j.logger." + getLoggerName(), "INFO," + getLoggerName() + APPENDER_SURFIX);
  PropertyConfigurator.configure(props);

  logger = Logger.getLogger(getLoggerName());
}
 
Example 5
Source Project: sqoop-on-spark   Source File: SecurityFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Loads and instantiates the configured authorization access controller.
 *
 * @param accessController fully qualified class name to instantiate
 * @return a new AuthorizationAccessController instance
 * @throws SqoopException AUTH_0008 when the class cannot be loaded or instantiated
 */
public static AuthorizationAccessController getAuthorizationAccessController(String accessController) throws ClassNotFoundException, IllegalAccessException, InstantiationException {

  Class<?> controllerClass = ClassUtils.loadClass(accessController);
  if (controllerClass == null) {
    throw new SqoopException(SecurityError.AUTH_0008,
            "Authorization Access Controller Class is null: " + accessController);
  }

  try {
    // Wrap any instantiation failure into a SqoopException with the class name.
    return (AuthorizationAccessController) controllerClass.newInstance();
  } catch (Exception ex) {
    throw new SqoopException(SecurityError.AUTH_0008,
            "Authorization Access Controller Class Exception: " + accessController, ex);
  }
}
 
Example 6
Source Project: sqoop-on-spark   Source File: CommonRepositoryHandler.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public void enableJob(long jobId, boolean enabled, Connection conn) {
  PreparedStatement stmt = null;

  try {
    // Flip the enabled flag of the given job row.
    stmt = conn.prepareStatement(crudQueries.getStmtEnableJob());
    stmt.setBoolean(1, enabled);
    stmt.setLong(2, jobId);
    stmt.executeUpdate();
  } catch (SQLException ex) {
    logException(ex, jobId);
    throw new SqoopException(CommonRepositoryError.COMMON_0039, ex);
  } finally {
    // Statement is always released, even on failure.
    closeStatements(stmt);
  }
}
 
Example 7
Source Project: sqoop-on-spark   Source File: SqoopLoader.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Reads the next piece of content handed over by the writer side.
 *
 * NOTE(review): the acquireSema()/releaseSema() pair appears to implement a
 * reader/writer handshake — confirm against SqoopLoader's writer side.
 *
 * @return the current data object, or null once the writer has finished
 * @throws InterruptedException if interrupted while waiting on the semaphore
 * @throws SqoopException MAPRED_EXEC_0018 wrapping any failure while reading
 */
@Override
public Object readContent() throws InterruptedException {
  // Block until the writer signals that data (or completion) is available.
  acquireSema();
  if (writerFinished) {
    // Writer side is done; null signals end-of-data to the caller.
    return null;
  }
  try {
    return toDataFormat.getData();
  } catch (Throwable t) {
    // Mark the reader as finished so the writer side does not wait forever.
    readerFinished = true;
    LOG.error("Caught exception e while getting content ", t);
    throw new SqoopException(MRExecutionError.MAPRED_EXEC_0018, t);
  } finally {
    // Always hand the semaphore back, success or failure.
    releaseSema();
  }
}
 
Example 8
Source Project: sqoop-on-spark   Source File: CommonRepositoryHandler.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public void deleteLink(long linkId, Connection conn) {
  PreparedStatement deleteStmt = null;

  try {
    // Link inputs reference the link row, so they must be removed first.
    deleteLinkInputs(linkId, conn);
    deleteStmt = conn.prepareStatement(crudQueries.getStmtDeleteLink());
    deleteStmt.setLong(1, linkId);
    deleteStmt.executeUpdate();
  } catch (SQLException ex) {
    logException(ex, linkId);
    throw new SqoopException(CommonRepositoryError.COMMON_0019, ex);
  } finally {
    // Statement is always released, even on failure.
    closeStatements(deleteStmt);
  }
}
 
Example 9
Source Project: sqoop-on-spark   Source File: TestSqoopLoader.java    License: Apache License 2.0 6 votes vote down vote up
// NOTE(review): despite its name, this test is annotated to EXPECT a
// SqoopException — confirm whether the name or the annotation is the intent.
@Test(expectedExceptions = SqoopException.class)
public void testSuccessfulLoader() throws Throwable {
  SqoopLoader executor = new SqoopLoader(jobContextMock,
      GoodLoader.class.getName(), getIDF(), getMatcher());
  RecordWriter<Text, NullWritable> writer = executor
      .getRecordWriter();
  IntermediateDataFormat<?> dataFormat = MRJobTestUtil.getTestIDF();
  // Build a CSV record "0,1,2,...,99" (no trailing comma).
  StringBuilder builder = new StringBuilder();
  for (int count = 0; count < 100; count++) {
    builder.append(String.valueOf(count));
    if (count != 99) {
      builder.append(",");
    }
  }
  dataFormat.setCSVTextData(builder.toString());
  writer.write(new Text(dataFormat.getCSVTextData()), null);

  // Allow writer to complete.
  TimeUnit.SECONDS.sleep(5);
  writer.close(null);
  // Loader should have consulted the job configuration and row counter exactly once.
  verify(jobContextMock, times(1)).getConfiguration();
  verify(jobContextMock, times(1)).getCounter(SqoopCounters.ROWS_WRITTEN);
}
 
Example 10
Source Project: sqoop-on-spark   Source File: TestJdbcRepository.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Test the procedure when the driverConfig auto upgrade option is enabled.
 *
 * Registers a newer driver while the repository holds an older one and
 * verifies that the upgrade path is taken (proved by the planted exception
 * from findJobs, which the upgrade code calls).
 */
@Test
public void testDriverConfigEnableAutoUpgrade() {
  MDriver newDriver = driver();
  MDriver oldDriver = anotherDriver();

  // The repository reports an older driver, forcing the upgrade path.
  when(repoHandlerMock.findDriver(anyString(), any(Connection.class))).thenReturn(oldDriver);

  // make the upgradeDriverConfig to throw an exception to prove that it has been called
  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "upgradeDriverConfig() has been called.");
  doThrow(exception).when(repoHandlerMock).findJobs(any(Connection.class));

  try {
    repoSpy.registerDriver(newDriver, true);
  } catch (SqoopException ex) {
    // The planted exception surfaced: upgrade was attempted as expected.
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findDriver(anyString(), any(Connection.class));
    verify(repoHandlerMock, times(1)).findJobs(any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return ;
  }

  // Reaching this point means the upgrade path was never taken.
  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example 11
Source Project: sqoop-on-spark   Source File: TestJobHandling.java    License: Apache License 2.0 6 votes vote down vote up
// Updating a job config as entity type USER must be rejected with a
// SqoopException (the config values being changed are not user-editable).
@Test(expectedExceptions = SqoopException.class)
public void testIncorrectEntityCausingConfigUpdate() throws Exception {
  loadJobsForLatestVersion();

  // Sanity-check the fixture before exercising the update.
  assertCountForTable("SQOOP.SQ_JOB", 4);
  assertCountForTable("SQOOP.SQ_JOB_INPUT", 24);
  MJob job = handler.findJob(1, derbyConnection);

  // Clone the first FROM config and rebuild it with modified input values.
  List<MConfig> fromConfigs = job.getJobConfig(Direction.FROM).getConfigs();
  MConfig fromConfig = fromConfigs.get(0).clone(false);
  MConfig newFromConfig = new MConfig(fromConfig.getName(), fromConfig.getInputs());
  HashMap<String, String> newMap = new HashMap<String, String>();
  newMap.put("1", "foo");
  newMap.put("2", "bar");

  ((MMapInput) newFromConfig.getInputs().get(1)).setValue(newMap);

  // Expected to throw: USER entity may not perform this config update.
  handler.updateJobConfig(job.getPersistenceId(), newFromConfig, MConfigUpdateEntityType.USER,
      derbyConnection);
}
 
Example 12
Source Project: incubator-sentry   Source File: SqoopAuthBinding.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Lists the Sentry privileges granted to a role, optionally scoped to a
 * resource, and converts them into Sqoop privilege objects.
 */
public List<MPrivilege> listPrivilegeByRole(final Subject subject, final String role, final MResource resource) throws SqoopException {
  Set<TSentryPrivilege> sentryPrivileges = execute(new Command<Set<TSentryPrivilege>>() {
    @Override
    public Set<TSentryPrivilege> run(SentryGenericServiceClient client)
        throws Exception {
      // No resource given: scope the lookup to the whole Sqoop server.
      if (resource == null) {
        return client.listPrivilegesByRoleName(subject.getName(), role, COMPONENT_TYPE, sqoopServer.getName());
      }
      // SERVER resource: the resource name itself is the service name.
      if (resource.getType().equalsIgnoreCase(MResource.TYPE.SERVER.name())) {
        return client.listPrivilegesByRoleName(subject.getName(), role, COMPONENT_TYPE, resource.getName());
      }
      // Any other resource: scope to the authorizable within the server.
      return client.listPrivilegesByRoleName(subject.getName(), role, COMPONENT_TYPE, sqoopServer.getName(), toAuthorizable(resource));
    }
  });

  // Convert the Sentry representation into Sqoop model objects.
  List<MPrivilege> result = Lists.newArrayList();
  for (TSentryPrivilege sentryPrivilege : sentryPrivileges) {
    result.add(toSqoopPrivilege(sentryPrivilege));
  }
  return result;
}
 
Example 13
Source Project: incubator-sentry   Source File: SentryAccessController.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Grants every privilege in {@code privileges} to every principal in
 * {@code principals}. Sentry only supports granting to ROLE principals.
 */
@Override
public void grantPrivileges(List<MPrincipal> principals, List<MPrivilege> privileges)
    throws SqoopException {
  for (MPrincipal principal : principals) {
    PrincipalDesc desc = PrincipalDesc.fromStr(principal.getName(), principal.getType());
    // Sentry can only grant privileges to roles; reject other principal types.
    if (desc.getType() != PrincipalType.ROLE) {
      throw new SqoopException(SecurityError.AUTH_0014,
          SentrySqoopError.GRANT_REVOKE_PRIVILEGE_NOT_SUPPORT_FOR_PRINCIPAL
              + desc.getType().name());
    }

    for (MPrivilege privilege : privileges) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Going to grant privilege : " + privilege +
            " to principal: " + principal);
      }
      binding.grantPrivilege(getSubject(), principal.getName(), privilege);
    }
  }
}
 
Example 14
Source Project: sqoop-on-spark   Source File: JdbcRepository.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public void deleteLink(final long linkId) {
  doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection connection) {
      // The link must exist...
      if (!handler.existsLink(linkId, connection)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0017,
          "Invalid id: " + linkId);
      }
      // ...and must not be referenced by any job.
      if (handler.inUseLink(linkId, connection)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0021,
          "Id in use: " + linkId);
      }

      handler.deleteLink(linkId, connection);
      return null;
    }
  });
}
 
Example 15
Source Project: sqoop-on-spark   Source File: ThrowableDisplayer.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Error hook installed to Groovy shell.
 *
 * Will display exception that appeared during executing command. In most
 * cases we will simply delegate the call to printing throwable method,
 * however in case that we've received ShellError.SHELL_0006 (server
 * exception), we will unwrap server issue and view only that as local
 * context shouldn't make any difference.
 *
 * @param t Throwable to be displayed
 */
public static void errorHook(Throwable t) {
  // Based on the kind of exception we are dealing with, let's provide different user experience
  if(t instanceof SqoopException && ((SqoopException)t).getErrorCode() == ShellError.SHELL_0006) {
    // Server-side exception: show only the unwrapped server cause.
    println("@|red Server has returned exception: |@");
    printThrowable(t.getCause(), isVerbose());
  } else if(t instanceof SqoopException && ((SqoopException)t).getErrorCode() == ShellError.SHELL_0003) {
    print("@|red Invalid command invocation: |@");
    // In most cases the cause will be actual parsing error, so let's print that alone
    if (t.getCause() != null) {
      println(t.getCause().getMessage());
    } else {
      println(t.getMessage());
    }
  } else if(t.getClass() == MissingPropertyException.class) {
    // Groovy raises MissingPropertyException for unrecognized identifiers/commands.
    print("@|red Unknown command: |@");
    println(t.getMessage());
  } else {
    // Anything else: generic error banner plus the full throwable.
    println("@|red Exception has occurred during processing command |@");
    printThrowable(t, isVerbose());
  }
}
 
Example 16
Source Project: sqoop-on-spark   Source File: ConfigUtils.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Validates a custom config name: it must be unique among the existing
 * names, be a valid Java identifier, and be at most 30 characters long.
 *
 * @param existingConfigNames names already in use (uniqueness check)
 * @param customConfigName    candidate config name
 * @throws SqoopException MODEL_012 on duplicate, MODEL_013 on an invalid
 *         identifier (including the empty string), MODEL_014 when too long
 */
private static void checkForValidConfigName(Set<String> existingConfigNames,
    String customConfigName) {
  // uniqueness across fields check
  if (existingConfigNames.contains(customConfigName)) {
    throw new SqoopException(ModelError.MODEL_012,
        "Issue with field config name " + customConfigName);
  }

  // Bug fix: the empty string previously threw ArrayIndexOutOfBoundsException
  // via toCharArray()[0]; treat it as an invalid identifier instead.
  if (customConfigName.isEmpty()
      || !Character.isJavaIdentifierStart(customConfigName.charAt(0))) {
    throw new SqoopException(ModelError.MODEL_013,
        "Issue with field config name " + customConfigName);
  }
  // Every remaining character must be a valid identifier part
  // (primitive char loop avoids the boxing of `for (Character c : ...)`).
  for (char c : customConfigName.toCharArray()) {
    if (!Character.isJavaIdentifierPart(c)) {
      throw new SqoopException(ModelError.MODEL_013,
          "Issue with field config name " + customConfigName);
    }
  }

  if (customConfigName.length() > 30) {
    throw new SqoopException(ModelError.MODEL_014,
        "Issue with field config name " + customConfigName);
  }
}
 
Example 17
Source Project: sqoop-on-spark   Source File: GenericJdbcExecutor.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns the column names of the given query's result set.
 *
 * @param query SQL query to execute (metadata only; rows are not read)
 * @return column names in result-set order
 * @throws SqoopException GENERIC_JDBC_CONNECTOR_0003 on any SQL failure
 */
public String[] getQueryColumns(String query) {
  // Bug fix: the Statement and ResultSet were never closed (resource leak);
  // try-with-resources releases both even when an exception is thrown.
  try (Statement statement = connection.createStatement(
           ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
       ResultSet rs = statement.executeQuery(query)) {

    ResultSetMetaData rsmd = rs.getMetaData();
    int count = rsmd.getColumnCount();
    String[] columns = new String[count];
    for (int i = 0; i < count; i++) {
      // JDBC column indexes are 1-based.
      columns[i] = rsmd.getColumnName(i + 1);
    }
    return columns;
  } catch (SQLException e) {
    logSQLException(e);
    throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0003, e);
  }
}
 
Example 18
Source Project: sqoop-on-spark   Source File: GenericJdbcExecutor.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether the given (possibly schema-qualified) table exists.
 *
 * @param table table name, optionally qualified; split by dequalify()
 * @return true when database metadata reports at least one matching table
 * @throws SqoopException GENERIC_JDBC_CONNECTOR_0003 on any SQL failure
 */
public boolean existTable(String table) {
  try {
    String[] splitNames = dequalify(table);

    DatabaseMetaData dbmd = connection.getMetaData();
    // Bug fix: the ResultSet was never closed (resource leak).
    try (ResultSet rs = dbmd.getTables(null, splitNames[0], splitNames[1], null)) {
      // At least one row means the table exists.
      return rs.next();
    }
  } catch (SQLException e) {
    logSQLException(e);
    throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0003, e);
  }
}
 
Example 19
Source Project: sqoop-on-spark   Source File: CommonRepositoryHandler.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public boolean inUseLink(long linkId, Connection conn) {
  PreparedStatement countStmt = null;
  ResultSet countRs = null;

  try {
    countStmt = conn.prepareStatement(crudQueries.getStmtSelectJobsForLinkCheck());
    countStmt.setLong(1, linkId);
    countRs = countStmt.executeQuery();

    // A count(*) query always produces exactly one row.
    countRs.next();

    // Non-zero count means at least one job still references this link.
    return countRs.getLong(1) != 0;

  } catch (SQLException e) {
    logException(e, linkId);
    throw new SqoopException(CommonRepositoryError.COMMON_0029, e);
  } finally {
    closeResultSets(countRs);
    closeStatements(countStmt);
  }
}
 
Example 20
Source Project: sqoop-on-spark   Source File: TestSqoopConfiguration.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Initializing SqoopConfiguration from an empty config directory must fail
 * with a SqoopException carrying error code CORE_0002.
 */
@Test
public void testBootstrapConfigurationInitFailure() {
  boolean failureSeen = false;
  try {
    // Point the config-dir system property at a directory with no config files.
    String configDirPath = SqoopTestUtils.createEmptyConfigDirectory();
    System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR,
        configDirPath);
    SqoopConfiguration.getInstance().initialize();
  } catch (Exception ex) {
    Assert.assertTrue(ex instanceof SqoopException);
    Assert.assertSame(((SqoopException) ex).getErrorCode(),
        CoreError.CORE_0002);
    failureSeen = true;
  }

  // Guard against initialize() silently succeeding.
  Assert.assertTrue(failureSeen);
}
 
Example 21
Source Project: incubator-sentry   Source File: SqoopAuthBinding.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Drops the privilege on the given resource from the Sentry store.
 * NOTE(review): the action is hard-wired to SqoopActionConstant.ALL —
 * confirm that dropping ALL is the intended matching semantics.
 *
 * @param resource resource whose privilege is dropped
 */
public void dropPrivilege(final MResource resource) throws SqoopException {
  execute(new Command<Void>() {
    @Override
    public Void run(SentryGenericServiceClient client) throws Exception {
      // Build a Sentry privilege descriptor that identifies the resource.
      TSentryPrivilege privilege = new TSentryPrivilege();
      privilege.setComponent(COMPONENT_TYPE);
      privilege.setServiceName(sqoopServer.getName());
      privilege.setAuthorizables(toTSentryAuthorizable(resource));
      privilege.setAction(SqoopActionConstant.ALL);
      client.dropPrivilege(bindingSubject.getName(), COMPONENT_TYPE, privilege);
      return null;
    }
  });
}
 
Example 22
/**
 * Builds the list of partitions for a mainframe transfer: retrieves the
 * sequential datasets under the configured name and distributes them
 * round-robin over at most {@code context.getMaxPartitions()} partitions.
 *
 * @return list of MainframeDatasetPartition, each holding >= 1 dataset
 * @throws SqoopException GENERIC_MAINFRAME_CONNECTOR_0000 when no datasets
 *         are found or dataset retrieval fails
 */
@Override
public List<Partition> getPartitions(PartitionerContext context,
                                     LinkConfiguration linkConfiguration,
                                     FromJobConfiguration fromJobConfig) {

  List<Partition> partitions = new ArrayList<Partition>();
  String dsName = fromJobConfig.fromJobConfig.datasetName;
  LOG.info("Datasets to transfer from: " + dsName);
  try {
    List<String> datasets = retrieveDatasets(dsName, context, linkConfiguration);
    if (datasets.isEmpty()) {
      throw new SqoopException(MainframeConnectorError.GENERIC_MAINFRAME_CONNECTOR_0000,
          "No sequential datasets retrieved from " + dsName);
    }

    // Never create more partitions than there are datasets.
    int count = datasets.size();
    int chunks = (int) Math.min(count, context.getMaxPartitions());
    for (int i = 0; i < chunks; i++) {
      partitions.add(new MainframeDatasetPartition());
    }

    // Distribute datasets round-robin across the partitions.
    int j = 0;
    while (j < count) {
      for (Partition partition : partitions) {
        if (j == count) {
          break;
        }
        ((MainframeDatasetPartition) partition).addDataset(datasets.get(j));
        j++;
      }
    }
    return partitions;
  } catch (IOException ioe) {
    // Bug fix: preserve the IOException as the cause instead of flattening
    // it to a string (the stack trace was previously lost).
    throw new SqoopException(MainframeConnectorError.GENERIC_MAINFRAME_CONNECTOR_0000,
        ioe.toString(), ioe);
  }
}
 
Example 23
Source Project: sqoop-on-spark   Source File: SparkDataWriter.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Matches the extracted record from the FROM side onto the TO side and
 * queues it on the Spark request.
 *
 * NOTE(review): the rewritten record is copied into a freshly instantiated
 * TO-side IDF via setCSVTextData, setData AND setObjectData — confirm
 * whether all three are required or whether later calls overwrite earlier
 * ones for the IDF implementations in use.
 */
private void writeContent() {
  try {
    if (LOG.isDebugEnabled()) {
      //LOG.debug("Extracted data: " + fromIDF.getCSVTextData());
    }
    // NOTE: The fromIDF and the corresponding fromSchema is used only
    // for the
    // matching process
    // The output of the mappers is finally written to the toIDF object
    // after
    // the matching process
    // since the writable encapsulates the toIDF ==> new
    // SqoopWritable(toIDF)
    toIDF.setObjectData(matcher.getMatchingData(fromIDF.getObjectData()));
    // NOTE: We do not use the reducer to do the writing (a.k.a LOAD in
    // ETL).
    // Hence the mapper sets up the writable
    String toIDFClass = request.getDriverContext().getString(
        JobConstants.TO_INTERMEDIATE_DATA_FORMAT);
    IntermediateDataFormat<Object> newIDF = (IntermediateDataFormat<Object>) ClassUtils
        .instantiate(toIDFClass);
    newIDF.setSchema(toIDF.getSchema());

    
    newIDF.setCSVTextData(toIDF.getCSVTextData());
    newIDF.setData(toIDF.getData());
    newIDF.setObjectData(toIDF.getObjectData());
    request.addData(newIDF);


  } catch (Exception e) {
    // Any failure during matching/copying is surfaced as MAPRED_EXEC_0013.
    throw new SqoopException(MRExecutionError.MAPRED_EXEC_0013, e);
  }
}
 
Example 24
Source Project: incubator-sentry   Source File: SentryAccessController.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the privileges held by a principal, optionally scoped to a
 * resource. Sentry only supports privilege lookup by role.
 */
@Override
public List<MPrivilege> getPrivilegesByPrincipal(MPrincipal principal,
    MResource resource) throws SqoopException {
  PrincipalDesc desc = PrincipalDesc.fromStr(principal.getName(), principal.getType());
  // Sentry can only resolve privileges for ROLE principals.
  if (desc.getType() != PrincipalType.ROLE) {
    throw new SqoopException(SecurityError.AUTH_0014,
        SentrySqoopError.SHOW_PRIVILEGE_NOT_SUPPORTED_FOR_PRINCIPAL
            + desc.getType().name());
  }
  return binding.listPrivilegeByRole(getSubject(), desc.getName(), resource);
}
 
Example 25
Source Project: sqoop-on-spark   Source File: TestJobHandling.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * findJob must fail with COMMON_0027 for an unknown id, and must return the
 * fully populated job (FROM, TO and driver configs) once fixtures are loaded.
 *
 * NOTE(review): several assertion pairs below re-check configs.get(0) where
 * the driver-config section checks configs.get(1) — looks like a copy-paste
 * slip; confirm the intended input indexes against the fixture data.
 */
@Test
public void testFindJob() throws Exception {
  // Let's try to find non existing job
  try {
    handler.findJob(1, derbyConnection);
    fail();
  } catch(SqoopException ex) {
    assertEquals(CommonRepositoryError.COMMON_0027, ex.getErrorCode());
  }

  loadJobsForLatestVersion();

  MJob firstJob = handler.findJob(1, derbyConnection);
  assertNotNull(firstJob);
  assertEquals(1, firstJob.getPersistenceId());
  assertEquals("JA0", firstJob.getName());

  List<MConfig> configs;

  configs = firstJob.getJobConfig(Direction.FROM).getConfigs();
  assertEquals(2, configs.size());
  assertEquals("Value5", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  // NOTE(review): duplicate of the assertion two lines up — likely meant configs.get(1).
  assertEquals("Value5", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());

  configs = firstJob.getJobConfig(Direction.TO).getConfigs();
  assertEquals(2, configs.size());
  assertEquals("Value9", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  // NOTE(review): duplicate of the assertion two lines up — likely meant configs.get(1).
  assertEquals("Value9", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());

  configs = firstJob.getDriverConfig().getConfigs();
  assertEquals(2, configs.size());
  assertEquals("Value13", configs.get(0).getInputs().get(0).getValue());
  assertNull(configs.get(0).getInputs().get(1).getValue());
  assertEquals("Value15", configs.get(1).getInputs().get(0).getValue());
  assertNull(configs.get(1).getInputs().get(1).getValue());
}
 
Example 26
Source Project: sqoop-on-spark   Source File: SqoopIDFUtils.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the CSV representation of a bit value, validating that it is one
 * of the recognized true/false spellings.
 *
 * @param obj value whose string form must be a recognized bit literal
 * @return the validated bit string
 * @throws SqoopException CSV_INTERMEDIATE_DATA_FORMAT_0005 for anything else
 */
public static String toCSVBit(Object obj) {
  String bitValue = obj.toString();
  boolean recognized = TRUE_BIT_SET.contains(bitValue) || FALSE_BIT_SET.contains(bitValue);
  if (!recognized) {
    throw new SqoopException(CSVIntermediateDataFormatError.CSV_INTERMEDIATE_DATA_FORMAT_0005, " given bit value: "
        + bitValue);
  }
  return bitValue;
}
 
Example 27
Source Project: sqoop-on-spark   Source File: JobRequest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the connector link config for the given transfer direction.
 *
 * @param type FROM or TO
 * @throws SqoopException DIRECTION_0000 for any other direction value
 */
public Object getConnectorLinkConfig(Direction type) {
  switch (type) {
    case FROM:
      return fromConnectorLinkConfig;
    case TO:
      return toConnectorLinkConfig;
    default:
      // Unknown direction values are a programming error.
      throw new SqoopException(DirectionError.DIRECTION_0000, "Direction: " + type);
  }
}
 
Example 28
Source Project: sqoop-on-spark   Source File: MJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the connector id for the given transfer direction.
 *
 * @param type FROM or TO
 * @throws SqoopException DIRECTION_0000 for any other direction value
 */
public long getConnectorId(Direction type) {
  switch (type) {
  case FROM:
    return fromConnectorId;
  case TO:
    return toConnectorId;
  default:
    // Unknown direction values are a programming error.
    throw new SqoopException(DirectionError.DIRECTION_0000, "Direction: " + type);
  }
}
 
Example 29
Source Project: sqoop-on-spark   Source File: JdbcRepository.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * {@inheritDoc}
 */
@Override
public MConfig findToJobConfig(final long jobId, final String configName) {
  return (MConfig) doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection connection) {
      // Fail fast when the job id is unknown.
      if (!handler.existsJob(jobId, connection)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0020, "Invalid id: " + jobId);
      }
      return handler.findToJobConfig(jobId, configName, connection);
    }
  });
}
 
Example 30
Source Project: sqoop-on-spark   Source File: MConfigList.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Looks up an input by its fully qualified "configName.inputName" name.
 *
 * @param name qualified input name containing exactly one dot
 * @throws SqoopException MODEL_009 when the name is not of the form a.b
 */
public MInput getInput(String name) {
  String[] parts = name.split("\\.");
  if (parts.length != 2) {
    // The name must contain exactly one separator: <config>.<input>
    throw new SqoopException(ModelError.MODEL_009, name);
  }

  // The config is addressed by its short name; the input keeps the full name.
  return getConfig(parts[0]).getInput(name);
}