org.apache.sqoop.common.SqoopException Java Examples

The following examples show how to use org.apache.sqoop.common.SqoopException. They are drawn from open-source projects; the originating project and source file are noted above each example.
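SqoopException is Sqoop 2's unchecked exception: each instance carries an org.apache.sqoop.common.ErrorCode (an interface exposing getCode() and getMessage()), optional detail text, and an optional cause, and makes the code available through getErrorCode(). The following minimal sketch shows the pattern the examples below rely on; the ExampleError enum is hypothetical, standing in for real enums such as RepositoryError or SecurityError:

import org.apache.sqoop.common.ErrorCode;
import org.apache.sqoop.common.SqoopException;

public class SqoopExceptionSketch {

  // Hypothetical error codes; production code uses enums such as
  // RepositoryError or CommonRepositoryError, as in the examples below.
  enum ExampleError implements ErrorCode {
    EXAMPLE_0001("Something went wrong");

    private final String message;

    ExampleError(String message) {
      this.message = message;
    }

    @Override
    public String getCode() {
      return name();
    }

    @Override
    public String getMessage() {
      return message;
    }
  }

  public static void main(String[] args) {
    try {
      // Attach an error code plus free-form detail text (a cause can be added too).
      throw new SqoopException(ExampleError.EXAMPLE_0001, "extra detail");
    } catch (SqoopException ex) {
      // SqoopException extends RuntimeException; callers typically dispatch
      // on the error code, as Example #13 does below.
      System.out.println(ex.getErrorCode().getCode() + ": " + ex.getMessage());
    }
  }
}

Note how the tests among the examples (for instance #1, #5 and #8) assert on the exception's message or error code rather than on the type alone.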
Example #1
Source File: TestJdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * Test the procedure when the driverConfig auto upgrade option is enabled
 */
@Test
public void testDriverConfigEnableAutoUpgrade() {
  MDriver newDriver = driver();
  MDriver oldDriver = anotherDriver();

  when(repoHandlerMock.findDriver(anyString(), any(Connection.class))).thenReturn(oldDriver);

  // make findJobs() throw an exception to prove that the driver config upgrade path has been called
  SqoopException exception = new SqoopException(RepositoryError.JDBCREPO_0000,
      "upgradeDriverConfig() has been called.");
  doThrow(exception).when(repoHandlerMock).findJobs(any(Connection.class));

  try {
    repoSpy.registerDriver(newDriver, true);
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repoHandlerMock, times(1)).findDriver(anyString(), any(Connection.class));
    verify(repoHandlerMock, times(1)).findJobs(any(Connection.class));
    verifyNoMoreInteractions(repoHandlerMock);
    return;
  }

  fail("Should throw out an exception with message: " + exception.getMessage());
}
 
Example #2
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public boolean inUseLink(long linkId, Connection conn) {
  PreparedStatement stmt = null;
  ResultSet rs = null;

  try {
    stmt = conn.prepareStatement(crudQueries.getStmtSelectJobsForLinkCheck());
    stmt.setLong(1, linkId);
    rs = stmt.executeQuery();

    // Always valid for a count(*) query
    rs.next();

    return rs.getLong(1) != 0;

  } catch (SQLException e) {
    logException(e, linkId);
    throw new SqoopException(CommonRepositoryError.COMMON_0029, e);
  } finally {
    closeResultSets(rs);
    closeStatements(stmt);
  }
}
 
Example #3
Source File: SubmissionRequestHandler.java    From sqoop-on-spark with Apache License 2.0
@Override
public JsonBean handleEvent(RequestContext ctx) {

  // submissions only support GET requests
  if (ctx.getMethod() != Method.GET) {
    throw new SqoopException(ServerError.SERVER_0002, "Unsupported HTTP method for connector:"
        + ctx.getMethod());
  }
  String jobIdentifier = ctx.getLastURLElement();
  Repository repository = RepositoryManager.getInstance().getRepository();
  // submissions per job are ordered by update time
  // hence the latest submission is on the top
  if (ctx.getParameterValue(JOB_NAME_QUERY_PARAM) != null) {
    jobIdentifier = ctx.getParameterValue(JOB_NAME_QUERY_PARAM);
    AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
        ctx.getRequest().getRemoteAddr(), "get", "submissionsByJob", jobIdentifier);
    long jobId = HandlerUtils.getJobIdFromIdentifier(jobIdentifier, repository);
    return getSubmissionsForJob(jobId);
  } else {
    // all submissions in the system
    AuditLoggerManager.getInstance().logAuditEvent(ctx.getUserName(),
        ctx.getRequest().getRemoteAddr(), "get", "submissions", "all");
    return getSubmissions();
  }
}
 
Example #4
Source File: GenericJdbcExecutor.java    From sqoop-on-spark with Apache License 2.0
public boolean existTable(String table) {
  try {
    String[] splitNames = dequalify(table);

    DatabaseMetaData dbmd = connection.getMetaData();
    ResultSet rs = dbmd.getTables(null, splitNames[0], splitNames[1], null);

    // true when at least one matching table exists
    return rs.next();

  } catch (SQLException e) {
    logSQLException(e);
    throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0003, e);
  }
}
 
Example #5
Source File: TestJobManager.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testDisabledJob() {
  MJob testJob = job(123L, 456L);
  testJob.setEnabled(false);
  testJob.setPersistenceId(1111);
  SqoopException exception = new SqoopException(DriverError.DRIVER_0009, "Job id: "
      + testJob.getPersistenceId());

  MJob mJobSpy = org.mockito.Mockito.spy(testJob);
  when(repositoryManagerMock.getRepository()).thenReturn(jdbcRepoMock);
  when(jdbcRepoMock.findJob(123L)).thenReturn(mJobSpy);
  try {
    jobManager.getJob(123L);
    fail("Disabled job should throw: " + exception.getMessage());
  } catch (SqoopException ex) {
    assertEquals(ex.getMessage(), exception.getMessage());
    verify(repositoryManagerMock, times(1)).getRepository();
    verify(jdbcRepoMock, times(1)).findJob(123L);
  }
}
 
Example #6
Source File: FileAuditLogger.java    From sqoop-on-spark with Apache License 2.0
public void initialize() {
  config = getLoggerConfig();

  String outputFile = config.get(FILE);
  if (outputFile == null) {
    throw new SqoopException(AuditLoggerError.AUDIT_0002);
  }

  // setup logger
  String appender = "log4j.appender." + getLoggerName() + APPENDER_SURFIX;
  LOG.warn("appender: " + appender);
  props.put(appender, "org.apache.log4j.RollingFileAppender");
  props.put(appender + ".File", outputFile);
  props.put(appender + ".layout", "org.apache.log4j.PatternLayout");
  props.put(appender + ".layout.ConversionPattern", "%d %-5p %c: %m%n");
  props.put("log4j.logger." + getLoggerName(), "INFO," + getLoggerName() + APPENDER_SURFIX);
  PropertyConfigurator.configure(props);

  logger = Logger.getLogger(getLoggerName());
}
 
Example #7
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * Validate that the input override attribute adheres to the imposed rules.
 * NOTE: all input names in a config class must be unique.
 * Rule #1:
 * If editable == USER_ONLY, an input (which cannot override itself) may
 * override CONNECTOR_ONLY and ANY inputs, but may not override other
 * USER_ONLY inputs.
 * Rule #2:
 * If editable == CONNECTOR_ONLY or ANY, an input (which cannot override
 * itself) may override any other input in the config object.
 * @param currentInput input whose overrides attribute is validated
 */
private List<String> validateAndGetOverridesAttribute(MInput<?> currentInput, MConfig config) {

  // split the overrides string into a comma-separated list
  String overrides = currentInput.getOverrides();
  if (StringUtils.isEmpty(overrides)) {
    return null;
  }
  String[] overrideInputs = overrides.split(",");
  List<String> children = new ArrayList<String>();

  for (String override : overrideInputs) {
    if (override.equals(currentInput.getName())) {
      throw new SqoopException(CommonRepositoryError.COMMON_0046, "for input :"
          + currentInput.toString());
    }
    if (currentInput.getEditable().equals(InputEditable.USER_ONLY)) {
      if (config.getUserOnlyEditableInputNames().contains(override)) {
        throw new SqoopException(CommonRepositoryError.COMMON_0045, "for input :"
            + currentInput.toString());
      }
    }
    children.add(override);
  }
  return children;
}
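A hypothetical illustration of the two rules above, with made-up input names (tableName is USER_ONLY; schemaName and sqlQuery are CONNECTOR_ONLY or ANY):

// tableName overrides "schemaName,sqlQuery"        -> valid (Rule #1)
// tableName overrides "tableName"                  -> COMMON_0046: no input may override itself
// tableName overrides another USER_ONLY input      -> COMMON_0045 (Rule #1)
// schemaName overrides "tableName"                 -> valid (Rule #2)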
 
Example #8
Source File: TestSqoopConfiguration.java    From sqoop-on-spark with Apache License 2.0
@Test
public void testBootstrapConfigurationInitFailure() {
  boolean success = false;
  try {
    String configDirPath = SqoopTestUtils.createEmptyConfigDirectory();
    System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR,
        configDirPath);
    SqoopConfiguration.getInstance().initialize();
  } catch (Exception ex) {
    Assert.assertTrue(ex instanceof SqoopException);
    Assert.assertSame(((SqoopException) ex).getErrorCode(),
        CoreError.CORE_0002);
    success = true;
  }

  Assert.assertTrue(success);
}
 
Example #9
Source File: GenericJdbcExecutor.java    From sqoop-on-spark with Apache License 2.0
public String[] getQueryColumns(String query) {
  try {
    Statement statement = connection.createStatement(
        ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY);
    ResultSet rs = statement.executeQuery(query);

    ResultSetMetaData rsmd = rs.getMetaData();
    int count = rsmd.getColumnCount();
    String[] columns = new String[count];
    // JDBC column indexes are 1-based
    for (int i = 0; i < count; i++) {
      columns[i] = rsmd.getColumnName(i + 1);
    }

    return columns;

  } catch (SQLException e) {
    logSQLException(e);
    throw new SqoopException(GenericJdbcConnectorError.GENERIC_JDBC_CONNECTOR_0003, e);
  }
}
 
Example #10
Source File: SecurityFactory.java    From sqoop-on-spark with Apache License 2.0
public static AuthorizationAccessController getAuthorizationAccessController(
    String accessController) throws ClassNotFoundException, IllegalAccessException,
    InstantiationException {

  Class<?> accessControllerClass = ClassUtils.loadClass(accessController);

  if (accessControllerClass == null) {
    throw new SqoopException(SecurityError.AUTH_0008,
        "Authorization Access Controller Class is null: " + accessController);
  }

  AuthorizationAccessController newAccessController;
  try {
    newAccessController = (AuthorizationAccessController) accessControllerClass.newInstance();
  } catch (Exception ex) {
    throw new SqoopException(SecurityError.AUTH_0008,
        "Authorization Access Controller Class Exception: " + accessController, ex);
  }
  return newAccessController;
}
 
Example #11
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void enableJob(long jobId, boolean enabled, Connection conn) {
  PreparedStatement enableConn = null;

  try {
    enableConn = conn.prepareStatement(crudQueries.getStmtEnableJob());
    enableConn.setBoolean(1, enabled);
    enableConn.setLong(2, jobId);
    enableConn.executeUpdate();
  } catch (SQLException ex) {
    logException(ex, jobId);
    throw new SqoopException(CommonRepositoryError.COMMON_0039, ex);
  } finally {
    closeStatements(enableConn);
  }
}
 
Example #12
Source File: ConfigUtils.java    From sqoop-on-spark with Apache License 2.0
private static void checkForValidConfigName(Set<String> existingConfigNames,
    String customConfigName) {
  // uniqueness across fields check
  if (existingConfigNames.contains(customConfigName)) {
    throw new SqoopException(ModelError.MODEL_012,
        "Issue with field config name " + customConfigName);
  }

  if (!Character.isJavaIdentifierStart(customConfigName.charAt(0))) {
    throw new SqoopException(ModelError.MODEL_013,
        "Issue with field config name " + customConfigName);
  }
  for (char c : customConfigName.toCharArray()) {
    if (!Character.isJavaIdentifierPart(c)) {
      throw new SqoopException(ModelError.MODEL_013,
          "Issue with field config name " + customConfigName);
    }
  }

  if (customConfigName.length() > 30) {
    throw new SqoopException(ModelError.MODEL_014,
        "Issue with field config name " + customConfigName);
  }
}
 
Example #13
Source File: ThrowableDisplayer.java    From sqoop-on-spark with Apache License 2.0
/**
 * Error hook installed into the Groovy shell.
 *
 * Displays exceptions that occur while executing a command. In most
 * cases we simply delegate the call to the throwable-printing method;
 * however, when we receive ShellError.SHELL_0006 (server exception),
 * we unwrap the server issue and display only that, as the local
 * context shouldn't make any difference.
 *
 * @param t Throwable to be displayed
 */
public static void errorHook(Throwable t) {
  // Based on the kind of exception we are dealing with, let's provide a different user experience
  if(t instanceof SqoopException && ((SqoopException)t).getErrorCode() == ShellError.SHELL_0006) {
    println("@|red Server has returned exception: |@");
    printThrowable(t.getCause(), isVerbose());
  } else if(t instanceof SqoopException && ((SqoopException)t).getErrorCode() == ShellError.SHELL_0003) {
    print("@|red Invalid command invocation: |@");
    // In most cases the cause will be actual parsing error, so let's print that alone
    if (t.getCause() != null) {
      println(t.getCause().getMessage());
    } else {
      println(t.getMessage());
    }
  } else if(t.getClass() == MissingPropertyException.class) {
    print("@|red Unknown command: |@");
    println(t.getMessage());
  } else {
    println("@|red Exception has occurred during processing command |@");
    printThrowable(t, isVerbose());
  }
}
 
Example #14
Source File: SqoopLoader.java    From sqoop-on-spark with Apache License 2.0
@Override
public Object readContent() throws InterruptedException {
  acquireSema();
  if (writerFinished) {
    return null;
  }
  try {
    return toDataFormat.getData();
  } catch (Throwable t) {
    readerFinished = true;
    LOG.error("Caught exception e while getting content ", t);
    throw new SqoopException(MRExecutionError.MAPRED_EXEC_0018, t);
  } finally {
    releaseSema();
  }
}
 
Example #15
Source File: JdbcRepository.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void deleteLink(final long linkId) {
  doWithConnection(new DoWithConnection() {
    @Override
    public Object doIt(Connection conn) {
      if(!handler.existsLink(linkId, conn)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0017,
          "Invalid id: " + linkId);
      }
      if(handler.inUseLink(linkId, conn)) {
        throw new SqoopException(RepositoryError.JDBCREPO_0021,
          "Id in use: " + linkId);
      }

      handler.deleteLink(linkId, conn);
      return null;
    }
  });
}
 
Example #16
Source File: SentryAccessController.java    From incubator-sentry with Apache License 2.0
@Override
public void grantPrivileges(List<MPrincipal> principals, List<MPrivilege> privileges)
    throws SqoopException {
  for (MPrincipal principal : principals) {
    PrincipalDesc principalDesc = PrincipalDesc.fromStr(principal.getName(), principal.getType());
    if (principalDesc.getType() != PrincipalType.ROLE) {
      throw new SqoopException(SecurityError.AUTH_0014,
          SentrySqoopError.GRANT_REVOKE_PRIVILEGE_NOT_SUPPORT_FOR_PRINCIPAL
              + principalDesc.getType().name());
    }

    for (MPrivilege privilege : privileges) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("Going to grant privilege : " + privilege +
            " to principal: " + principal);
      }
      binding.grantPrivilege(getSubject(), principal.getName(), privilege);
    }
  }
}
 
Example #17
Source File: CommonRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public void deleteLink(long linkId, Connection conn) {
  PreparedStatement dltConn = null;

  try {
    deleteLinkInputs(linkId, conn);
    dltConn = conn.prepareStatement(crudQueries.getStmtDeleteLink());
    dltConn.setLong(1, linkId);
    dltConn.executeUpdate();
  } catch (SQLException ex) {
    logException(ex, linkId);
    throw new SqoopException(CommonRepositoryError.COMMON_0019, ex);
  } finally {
    closeStatements(dltConn);
  }
}
 
Example #18
Source File: TestSqoopLoader.java    From sqoop-on-spark with Apache License 2.0
@Test(expectedExceptions = SqoopException.class)
public void testSuccessfulLoader() throws Throwable {
  SqoopLoader executor = new SqoopLoader(jobContextMock,
      GoodLoader.class.getName(), getIDF(), getMatcher());
  RecordWriter<Text, NullWritable> writer = executor
      .getRecordWriter();
  IntermediateDataFormat<?> dataFormat = MRJobTestUtil.getTestIDF();
  StringBuilder builder = new StringBuilder();
  for (int count = 0; count < 100; count++) {
    builder.append(String.valueOf(count));
    if (count != 99) {
      builder.append(",");
    }
  }
  dataFormat.setCSVTextData(builder.toString());
  writer.write(new Text(dataFormat.getCSVTextData()), null);

  // Allow writer to complete.
  TimeUnit.SECONDS.sleep(5);
  writer.close(null);
  verify(jobContextMock, times(1)).getConfiguration();
  verify(jobContextMock, times(1)).getCounter(SqoopCounters.ROWS_WRITTEN);
}
 
Example #19
Source File: SqoopAuthBinding.java    From incubator-sentry with Apache License 2.0
public List<MPrivilege> listPrivilegeByRole(final Subject subject, final String role, final MResource resource) throws SqoopException {
  Set<TSentryPrivilege> tSentryPrivileges = execute(new Command<Set<TSentryPrivilege>>() {
    @Override
    public Set<TSentryPrivilege> run(SentryGenericServiceClient client)
        throws Exception {
      if (resource == null) {
        return client.listPrivilegesByRoleName(subject.getName(), role, COMPONENT_TYPE, sqoopServer.getName());
      } else if (resource.getType().equalsIgnoreCase(MResource.TYPE.SERVER.name())) {
        return client.listPrivilegesByRoleName(subject.getName(), role, COMPONENT_TYPE, resource.getName());
      } else {
        return client.listPrivilegesByRoleName(subject.getName(), role, COMPONENT_TYPE, sqoopServer.getName(), toAuthorizable(resource));
      }
    }
  });

  List<MPrivilege> privileges = Lists.newArrayList();
  for (TSentryPrivilege tSentryPrivilege : tSentryPrivileges) {
    privileges.add(toSqoopPrivilege(tSentryPrivilege));
  }
  return privileges;
}
 
Example #20
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test(expectedExceptions = SqoopException.class)
public void testIncorrectEntityCausingConfigUpdate() throws Exception {
  loadJobsForLatestVersion();

  assertCountForTable("SQOOP.SQ_JOB", 4);
  assertCountForTable("SQOOP.SQ_JOB_INPUT", 24);
  MJob job = handler.findJob(1, derbyConnection);

  List<MConfig> fromConfigs = job.getJobConfig(Direction.FROM).getConfigs();
  MConfig fromConfig = fromConfigs.get(0).clone(false);
  MConfig newFromConfig = new MConfig(fromConfig.getName(), fromConfig.getInputs());
  HashMap<String, String> newMap = new HashMap<String, String>();
  newMap.put("1", "foo");
  newMap.put("2", "bar");

  ((MMapInput) newFromConfig.getInputs().get(1)).setValue(newMap);

  handler.updateJobConfig(job.getPersistenceId(), newFromConfig, MConfigUpdateEntityType.USER,
      derbyConnection);
}
 
Example #21
Source File: KiteDatasetExecutor.java    From sqoop-on-spark with Apache License 2.0
/**
 * Merges a dataset into this.
 */
public void mergeDataset(String uri) {
  FileSystemDataset<GenericRecord> update = Datasets.load(uri);
  if (dataset instanceof FileSystemDataset) {
    ((FileSystemDataset<GenericRecord>) dataset).merge(update);
    // And let's completely drop the temporary dataset
    Datasets.delete(uri);
  } else {
    throw new SqoopException(
        KiteConnectorError.GENERIC_KITE_CONNECTOR_0000, uri);
  }
}
 
Example #22
Source File: TestToInitializer.java    From sqoop-on-spark with Apache License 2.0
@Test
@SuppressWarnings("unchecked")
public void testNonEmptyStageTable() throws Exception {
  LinkConfiguration linkConfig = new LinkConfiguration();
  ToJobConfiguration jobConfig = new ToJobConfiguration();

  String fullStageTableName = executor.delimitIdentifier(stageTableName);

  linkConfig.linkConfig.jdbcDriver = JdbcTestConstants.DRIVER;
  linkConfig.linkConfig.connectionString = JdbcTestConstants.URL;
  jobConfig.toJobConfig.tableName = schemalessTableName;
  jobConfig.toJobConfig.stageTableName = stageTableName;
  createTable(fullStageTableName);
  executor.executeUpdate("INSERT INTO " + fullStageTableName +
    " VALUES(1, 1.1, 'one')");
  MutableContext context = new MutableMapContext();
  InitializerContext initializerContext = new InitializerContext(context);

  @SuppressWarnings("rawtypes")
  Initializer initializer = new GenericJdbcToInitializer();
  try {
    initializer.initialize(initializerContext, linkConfig, jobConfig);
    fail("Initialization should fail for non-empty stage table.");
  } catch (SqoopException se) {
    // expected
  }
}
 
Example #23
Source File: TestJobHandling.java    From sqoop-on-spark with Apache License 2.0
@Test(expectedExceptions = SqoopException.class)
public void testNonExistingToConfigFetch() throws Exception {
  loadJobsForLatestVersion();

  assertCountForTable("SQOOP.SQ_JOB", 4);
  assertCountForTable("SQOOP.SQ_JOB_INPUT", 24);
  handler.findToJobConfig(1, "Non-ExistingC2JOB1", derbyConnection);
}
 
Example #24
Source File: DerbyRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * Pre-register the Driver (in place since the 1.99.3 release).
 * NOTE: This should be used only in the upgrade path.
 */
@Deprecated
protected long registerDriver(Connection conn) {
  if (LOG.isTraceEnabled()) {
    LOG.trace("Begin Driver loading.");
  }

  PreparedStatement baseDriverStmt = null;
  try {
    baseDriverStmt = conn.prepareStatement(STMT_INSERT_INTO_CONFIGURABLE,
        Statement.RETURN_GENERATED_KEYS);
    baseDriverStmt.setString(1, MDriver.DRIVER_NAME);
    baseDriverStmt.setString(2, Driver.getClassName());
    baseDriverStmt.setString(3, "1");
    baseDriverStmt.setString(4, MConfigurableType.DRIVER.name());

    int baseDriverCount = baseDriverStmt.executeUpdate();
    if (baseDriverCount != 1) {
      throw new SqoopException(DerbyRepoError.DERBYREPO_0003, Integer.toString(baseDriverCount));
    }

    ResultSet rsetDriverId = baseDriverStmt.getGeneratedKeys();

    if (!rsetDriverId.next()) {
      throw new SqoopException(DerbyRepoError.DERBYREPO_0004);
    }
    return rsetDriverId.getLong(1);
  } catch (SQLException ex) {
    throw new SqoopException(DerbyRepoError.DERBYREPO_0009, ex);
  } finally {
    closeStatements(baseDriverStmt);
  }
}
 
Example #25
Source File: SqoopAuthBinding.java    From incubator-sentry with Apache License 2.0
public void createRole(final Subject subject, final String role) throws SqoopException {
  execute(new Command<Void>() {
    @Override
    public Void run(SentryGenericServiceClient client) throws Exception {
      client.createRole(subject.getName(), role, COMPONENT_TYPE);
      return null;
    }
  });
}
 
Example #26
Source File: DerbyRepositoryHandler.java    From sqoop-on-spark with Apache License 2.0
/**
 * {@inheritDoc}
 */
@Override
public synchronized void shutdown() {
  String driver = repoContext.getDriverClass();
  if (driver != null && driver.equals(EMBEDDED_DERBY_DRIVER_CLASSNAME)) {
    // Using embedded derby. Needs explicit shutdown
    String connectUrl = repoContext.getConnectionUrl();
    if (connectUrl.startsWith("jdbc:derby:")) {
      int index = connectUrl.indexOf(';');
      String baseUrl = null;
      if (index != -1) {
        baseUrl = connectUrl.substring(0, index+1);
      } else {
        baseUrl = connectUrl + ";";
      }
      String shutDownUrl = baseUrl + "shutdown=true";

      LOG.debug("Attempting to shutdown embedded Derby using URL: "
          + shutDownUrl);

      try {
        DriverManager.getConnection(shutDownUrl);
      } catch (SQLException ex) {
        // Shutdown of one db instance is expected to raise error code 45000
        if (ex.getErrorCode() != 45000) {
          throw new SqoopException(
              DerbyRepoError.DERBYREPO_0001, shutDownUrl, ex);
        }
        LOG.info("Embedded Derby shutdown raised SQL STATE "
            + "45000 as expected.");
      }
    } else {
      LOG.warn("Even though embedded Derby driver was loaded, the connect "
          + "URL is of an unexpected config: " + connectUrl + ". Therefore no "
          + "attempt will be made to shutdown embedded Derby instance.");
    }
  }
}
 
Example #27
Source File: MainframeConnector.java    From spark-mainframe-connector with Apache License 2.0
/**
 * @param jobType job direction (FROM or TO)
 * @return the job configuration class for the given type, or null if not supported
 */
@SuppressWarnings("rawtypes")
@Override
public Class getJobConfigurationClass(Direction jobType) {
  switch (jobType) {
    case FROM:
      return FromJobConfiguration.class;
    case TO:
      throw new SqoopException(MainframeConnectorError.GENERIC_MAINFRAME_CONNECTOR_0004);
    default:
      throw new SqoopException(
              MainframeConnectorError.GENERIC_MAINFRAME_CONNECTOR_0003,
              String.valueOf(jobType));
  }
}
 
Example #28
Source File: AuthorizationRequestHandler.java    From sqoop-on-spark with Apache License 2.0
private JsonBean getPrincipal(RequestContext ctx) {
  AuthorizationHandler handler = AuthorizationManager.getAuthorizationHandler();
  AuditLoggerManager manager = AuditLoggerManager.getInstance();
  String role_name = ctx.getParameterValue(ROLE_NAME_QUERY_PARAM);

  if (role_name != null) {
    // get principal by role
    MRole role = new MRole(role_name);
    manager.logAuditEvent(ctx.getUserName(),
            ctx.getRequest().getRemoteAddr(), "get", "principals by role", role.toString());
    return new PrincipalsBean(handler.getPrincipalsByRole(role));
  } else {
    throw new SqoopException(SecurityError.AUTH_0012, "Can't get role name");
  }
}
 
Example #29
Source File: DefaultAuthorizationAccessController.java    From sqoop-on-spark with Apache License 2.0
@Override
public void revokePrivileges(List<MPrincipal> principals, List<MPrivilege> privileges) throws SqoopException {
  LOG.debug("Revoke privileges in default authorization access controller: empty function");
  for (MPrincipal principal : principals) {
    LOG.debug("principal: " + principal.toString());
  }
  if (privileges != null) { // a null list means revoke all privileges on the principal
    for (MPrivilege privilege : privileges) {
      LOG.debug("privilege: " + privilege.toString());
    }
  }
}
 
Example #30
Source File: RangerSqoopAuthorizerTest.java    From ranger with Apache License 2.0
/**
 * User yuwen creates a job from any two links: expected to fail
 * without permission.
 */
@Test(expected = SqoopException.class)
public void createJobByAnyTwoLinksWithoutPermission() {
	String user = YUWEN;
	String link1 = getRandomLinkName();
	String link2 = getRandomLinkName();
	AuthorizationEngine.createJob(user, link1, link2);
}