azkaban.utils.Props Java Examples

The following examples show how to use azkaban.utils.Props. They are extracted from open source projects; the source file, originating project, and license are listed above each example.
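
For orientation, here is a minimal usage sketch that combines the Props constructors and accessors appearing in the examples below. The key names and values are hypothetical; only the method calls themselves are taken from the examples, and the usual imports (azkaban.utils.Props, java.util.Properties) are assumed.

Props props = new Props();
props.put("azkaban.temp.dir", "/tmp/azkaban");                 // hypothetical key/value pair
props.put("retry.count", "3");

String tempDir = props.getString("azkaban.temp.dir", "temp");  // read with a default value
int retries = props.getInt("retry.count", 1);
boolean hasRetries = props.containsKey("retry.count");

// A properties file can also be loaded with new Props(null, "conf/job.properties"),
// as in Examples #6, #10, and #12 below.

// Convert to java.util.Properties when an API expects the JDK type.
Properties properties = props.toProperties();
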
Example #1
Source File: HadoopSecurityManager_H_2_0.java    From azkaban-plugins with Apache License 2.0
public static HadoopSecurityManager getInstance(Props props)
    throws HadoopSecurityManagerException, IOException {
  if (hsmInstance == null) {
    synchronized (HadoopSecurityManager_H_2_0.class) {
      if (hsmInstance == null) {
        logger.info("getting new instance");
        hsmInstance = new HadoopSecurityManager_H_2_0(props);
      }
    }
  }

  logger.debug("Relogging in from keytab if necessary.");
  hsmInstance.reloginFromKeytab();

  return hsmInstance;
}
 
Example #2
Source File: HadoopSecurityManager_H_1_0.java    From azkaban-plugins with Apache License 2.0
public String verifySecureProperty(Props props, String s)
    throws HadoopSecurityManagerException {
  String value = props.getString(s);
  if (value == null) {
    throw new HadoopSecurityManagerException(s + " not set in properties.");
  }
  // logger.info("Secure proxy configuration: Property " + s + " = " + value);
  return value;
}
 
Example #3
Source File: TeradataJob.java    From azkaban-plugins with Apache License 2.0
public TeradataJob(String jobid, Props sysProps, Props jobProps, Logger log) {
  super(jobid, sysProps, jobProps, log);
  jobProps.put(LIB_JARS_KEY, sysProps.get(LIB_JARS_KEY));
  // Initialize TDWallet if it has not already been initialized on the current JVM.
  File tempDir = new File(sysProps.getString("azkaban.temp.dir", "temp"));
  TeraDataWalletInitializer.initialize(tempDir, new File(sysProps.get(TD_WALLET_JAR)));

  if(sysProps.containsKey(Whitelist.WHITE_LIST_FILE_PATH_KEY)) {
    jobProps.put(Whitelist.WHITE_LIST_FILE_PATH_KEY, sysProps.getString(Whitelist.WHITE_LIST_FILE_PATH_KEY));
  }
}
 
Example #4
Source File: HadoopSecurityManager_H_2_0.java    From azkaban-plugins with Apache License 2.0
public String verifySecureProperty(Props props, String s)
    throws HadoopSecurityManagerException {
  String value = props.getString(s);
  if (value == null) {
    throw new HadoopSecurityManagerException(s + " not set in properties.");
  }
  return value;
}
 
Example #5
Source File: HadoopSecurityManager_H_2_0.java    From azkaban-plugins with Apache License 2.0
/**
 * Create a proxied user, taking all parameters, including which user to proxy
 * from provided Properties.
 */
@Override
public UserGroupInformation getProxiedUser(Props userProp)
    throws HadoopSecurityManagerException {
  String userToProxy = verifySecureProperty(userProp, USER_TO_PROXY);
  UserGroupInformation user = getProxiedUser(userToProxy);
  if (user == null) {
    throw new HadoopSecurityManagerException(
        "Proxy as any user in unsecured grid is not supported!");
  }
  return user;
}
 
Example #6
Source File: ComplianceAzkabanJob.java    From incubator-gobblin with Apache License 2.0
@Override
public int run(String[] args)
    throws Exception {
  if (args.length < 1) {
    System.out.println("Must provide properties file as first argument.");
    return 1;
  }
  Props props = new Props(null, args[0]);
  new ComplianceAzkabanJob(ComplianceAzkabanJob.class.getName(), props).run();
  return 0;
}
 
Example #7
Source File: ComplianceAzkabanJob.java    From incubator-gobblin with Apache License 2.0
public ComplianceAzkabanJob(String id, Props props)
    throws IOException {
  super(id, Logger.getLogger(ComplianceAzkabanJob.class));
  this.conf = new Configuration();
  // Convert the Azkaban Props into java.util.Properties for the compliance job.
  Properties properties = props.toProperties();
  Preconditions.checkArgument(properties.containsKey(GOBBLIN_COMPLIANCE_JOB_CLASS),
      "Missing required property " + GOBBLIN_COMPLIANCE_JOB_CLASS);
  String complianceJobClass = properties.getProperty(GOBBLIN_COMPLIANCE_JOB_CLASS);
  this.complianceJob = GobblinConstructorUtils.invokeConstructor(ComplianceJob.class, complianceJobClass, properties);
}
 
Example #8
Source File: AzkabanJobRunner.java    From incubator-gobblin with Apache License 2.0
private Properties propsToProperties(Props props) {
  Properties properties = new Properties();
  for (String key : props.getKeySet()) {
    properties.put(key, props.getString(key));
  }
  return properties;
}
 
Example #9
Source File: AzkabanJobRunner.java    From incubator-gobblin with Apache License 2.0
private AbstractJob constructAbstractJob(String name, Props jobProps) {
  try {
    return (AbstractJob) jobProps.getClass("job.class").getConstructor(String.class, Props.class)
        .newInstance(name, jobProps);
  } catch (ReflectiveOperationException roe) {
    try {
      return (AbstractJob) jobProps.getClass("job.class").getConstructor(String.class, Properties.class)
          .newInstance(name, propsToProperties(jobProps));
    } catch (ReflectiveOperationException exc) {
      throw new RuntimeException(exc);
    }
  }
}
 
Example #10
Source File: DatasetCleanerJob.java    From incubator-gobblin with Apache License 2.0
@Override
public int run(String[] args) throws Exception {
  if (args.length < 1) {
    System.out.println("Must provide properties file as first argument.");
    return 1;
  }
  Props props = new Props(null, args[0]);
  new DatasetCleanerJob(DatasetCleanerJob.class.getName(), props).run();
  return 0;
}
 
Example #11
Source File: DefaultHadoopSecurityManager.java    From azkaban-plugins with Apache License 2.0
public static HadoopSecurityManager getInstance(Props props)
    throws HadoopSecurityManagerException, IOException {
  if (hsmInstance == null) {
    synchronized (DefaultHadoopSecurityManager.class) {
      if (hsmInstance == null) {
        logger.info("getting new instance");
        hsmInstance = new DefaultHadoopSecurityManager(props);
      }
    }
  }
  return hsmInstance;
}
 
Example #12
Source File: TrashCollectorJob.java    From incubator-gobblin with Apache License 2.0
@Override
public int run(String[] args) throws Exception {
  if (args.length < 1) {
    System.out.println("Must provide properties file as first argument.");
    return 1;
  }
  Props props = new Props(null, args[0]);
  new TrashCollectorJob(TrashCollectorJob.class.getName(), props).run();
  return 0;
}
 
Example #13
Source File: DefaultHadoopSecurityManager.java    From azkaban-plugins with Apache License 2.0
/**
 * Create a proxied user, taking all parameters, including which user to proxy
 * from provided Properties.
 */
@Override
public UserGroupInformation getProxiedUser(Props prop)
    throws HadoopSecurityManagerException {
  throw new HadoopSecurityManagerException(
      "No real Hadoop Security Manager is set!");
}
 
Example #14
Source File: HadoopSecurityManager_H_1_0.java    From azkaban-plugins with Apache License 2.0
public static HadoopSecurityManager getInstance(Props props)
    throws HadoopSecurityManagerException, IOException {
  if (hsmInstance == null) {
    synchronized (HadoopSecurityManager_H_1_0.class) {
      if (hsmInstance == null) {
        logger.info("getting new instance");
        hsmInstance = new HadoopSecurityManager_H_1_0(props);
      }
    }
  }
  return hsmInstance;
}
 
Example #15
Source File: LdapUserManagerTest.java    From azkaban-ldap-usermanager with MIT License
@Test
public void testInvalidIdPropertyThrowsUserManagerException() throws Exception {
    thrown.expect(UserManagerException.class);

    Props props = getProps();
    props.put(LdapUserManager.LDAP_USERID_PROPERTY, "invalidField");
    userManager = new LdapUserManager(props);
    userManager.getUser("gauss", "password");
}
 
Example #16
Source File: LdapUserManagerTest.java    From azkaban-ldap-usermanager with MIT License
@Test
public void testInvalidEmailPropertyDoesNotThrowNullPointerException() throws Exception {
    Props props = getProps();
    props.put(LdapUserManager.LDAP_EMAIL_PROPERTY, "invalidField");
    userManager = new LdapUserManager(props);
    User user = userManager.getUser("gauss", "password");

    assertEquals("gauss", user.getUserId());
    assertEquals("", user.getEmail());
}
 
Example #17
Source File: LdapUserManagerTest.java    From azkaban-ldap-usermanager with MIT License
@Test
public void testGetUserWithEmbeddedGroup() throws Exception {
    Props props = getProps();
    props.put(LdapUserManager.LDAP_ALLOWED_GROUPS, "svc-test");
    props.put(LdapUserManager.LDAP_EMBEDDED_GROUPS, "true");
    final LdapUserManager manager = new LdapUserManager(props);

    User user = manager.getUser("gauss", "password");

    assertEquals("gauss", user.getUserId());
    assertEquals("[email protected]", user.getEmail());
}
 
Example #18
Source File: LdapUserManagerTest.java    From azkaban-ldap-usermanager with MIT License
@Test
public void testGetUserWithAllowedGroupThatGroupOfNames() throws Exception {
    Props props = getProps();
    props.put(LdapUserManager.LDAP_ALLOWED_GROUPS, "svc-test2");
    final LdapUserManager manager = new LdapUserManager(props);

    User user = manager.getUser("gauss", "password");

    assertEquals("gauss", user.getUserId());
    assertEquals("[email protected]", user.getEmail());
}
 
Example #19
Source File: LdapUserManagerTest.java    From azkaban-ldap-usermanager with MIT License
@Test
public void testGetUserWithAllowedGroup() throws Exception {
    Props props = getProps();
    props.put(LdapUserManager.LDAP_ALLOWED_GROUPS, "svc-test");
    final LdapUserManager manager = new LdapUserManager(props);

    User user = manager.getUser("gauss", "password");

    assertEquals("gauss", user.getUserId());
    assertEquals("[email protected]", user.getEmail());
}
 
Example #20
Source File: LdapUserManagerTest.java    From azkaban-ldap-usermanager with MIT License
private Props getProps() {
    Props props = new Props();
    props.put(LdapUserManager.LDAP_HOST, "localhost");
    props.put(LdapUserManager.LDAP_PORT, "11389");
    props.put(LdapUserManager.LDAP_USE_SSL, "false");
    props.put(LdapUserManager.LDAP_USER_BASE, "dc=example,dc=com");
    props.put(LdapUserManager.LDAP_USERID_PROPERTY, "uid");
    props.put(LdapUserManager.LDAP_EMAIL_PROPERTY, "mail");
    props.put(LdapUserManager.LDAP_BIND_ACCOUNT, "cn=read-only-admin,dc=example,dc=com");
    props.put(LdapUserManager.LDAP_BIND_PASSWORD, "password");
    props.put(LdapUserManager.LDAP_ALLOWED_GROUPS, "");
    props.put(LdapUserManager.LDAP_GROUP_SEARCH_BASE, "dc=example,dc=com");
    return props;
}
 
Example #21
Source File: HadoopSecurityManager_H_1_0.java    From azkaban-plugins with Apache License 2.0
/**
 * Create a proxied user, taking all parameters, including which user to proxy
 * from provided Properties.
 */
@Override
public UserGroupInformation getProxiedUser(Props userProp)
    throws HadoopSecurityManagerException {
  String userToProxy = verifySecureProperty(userProp, USER_TO_PROXY);
  UserGroupInformation user = getProxiedUser(userToProxy);
  if (user == null) {
    throw new HadoopSecurityManagerException(
        "Proxy as any user in unsecured grid is not supported!");
  }
  return user;
}
 
Example #22
Source File: LdapUserManager.java    From azkaban-ldap-usermanager with MIT License
public LdapUserManager(Props props) {
    ldapHost = props.getString(LDAP_HOST);
    ldapPort = props.getInt(LDAP_PORT);
    useSsl = props.getBoolean(LDAP_USE_SSL);
    ldapUserBase = props.getString(LDAP_USER_BASE);
    ldapUserIdProperty = props.getString(LDAP_USERID_PROPERTY);
    ldapUEmailProperty = props.getString(LDAP_EMAIL_PROPERTY);
    ldapBindAccount = props.getString(LDAP_BIND_ACCOUNT);
    ldapBindPassword = props.getString(LDAP_BIND_PASSWORD);
    ldapAllowedGroups = props.getStringList(LDAP_ALLOWED_GROUPS);
    ldapAdminGroups = props.getStringList(LDAP_ADMIN_GROUPS);
    ldapGroupSearchBase = props.getString(LDAP_GROUP_SEARCH_BASE);
    ldapEmbeddedGroups = props.getBoolean(LDAP_EMBEDDED_GROUPS, false);
}
 
Example #23
Source File: DefaultHadoopSecurityManager.java    From azkaban-plugins with Apache License 2.0
@Override
public void prefetchToken(File tokenFile, Props props, Logger logger)
    throws HadoopSecurityManagerException {
  throw new HadoopSecurityManagerException(
      "No real Hadoop Security Manager is set!");
}
 
Example #24
Source File: DefaultHadoopSecurityManager.java    From azkaban-plugins with Apache License 2.0
private DefaultHadoopSecurityManager(Props props) {
  logger.info("Default Hadoop Security Manager is used. Only do this on "
      + "a non-hadoop cluster!");
}
 
Example #25
Source File: HadoopSecurityManager.java    From azkaban-plugins with Apache License 2.0
public abstract void prefetchToken(File tokenFile, Props props, Logger logger)
    throws HadoopSecurityManagerException;
 
Example #26
Source File: ParseLogJobAzkaban.java    From 163-bigdate-note with GNU General Public License v3.0
public ParseLogJobAzkaban(String name, Props props) {
    super(name, props);
    this.inputPath = props.getString("input.path");
    this.outputPath = props.getString("output.path");
}
 
Example #27
Source File: HdfsToTeradataJob.java    From azkaban-plugins with Apache License 2.0
public HdfsToTeradataJob(String jobid, Props sysProps, Props jobProps, Logger log) {
  super(jobid, sysProps, jobProps, log);
  getJobProps().put(LIB_JARS_HIVE_KEY, sysProps.get(LIB_JARS_HIVE_KEY));
}
 
Example #28
Source File: HdfsToTeradataJobRunnerMain.java    From azkaban-plugins with Apache License 2.0
private String createLibJarStr(Props props) {
  if (TdchConstants.TDCH_HIVE_JOB_TYPE.equals(props.getString(TdchConstants.TDCH_JOB_TYPE, TdchConstants.DEFAULT_TDCH_JOB_TYPE))) {
    return props.getString(TdchConstants.LIB_JARS_HIVE_KEY);
  }
  return props.getString(TdchConstants.LIB_JARS_KEY);
}
 
Example #29
Source File: HdfsToTeradataJobRunnerMain.java    From azkaban-plugins with Apache License 2.0
@VisibleForTesting
HdfsToTeradataJobRunnerMain(Properties jobProps, Decryptions decryptions) throws FileNotFoundException, IOException {
  _logger = JobUtils.initJobLogger();
  _logger.info("Job properties: " + jobProps);

  String logLevel = jobProps.getProperty(TdchConstants.TDCH_LOG_LEVEL);
  if(!StringUtils.isEmpty(logLevel)) {
    _logger.setLevel(Level.toLevel(logLevel));
  }
  _jobProps = jobProps;
  Props props = new Props(null, _jobProps);

  HadoopConfigurationInjector.injectResources(props);
  Configuration conf = new Configuration();
  UserGroupInformation.setConfiguration(conf);

  if (props.containsKey(Whitelist.WHITE_LIST_FILE_PATH_KEY)) {
    new Whitelist(props, FileSystem.get(conf)).validateWhitelisted(props);
  }

  String encryptedCredential = _jobProps.getProperty(TdchConstants.TD_ENCRYPTED_CREDENTIAL_KEY);
  String cryptoKeyPath = _jobProps.getProperty(TdchConstants.TD_CRYPTO_KEY_PATH_KEY);
  String password = null;

  if(encryptedCredential != null && cryptoKeyPath != null) {
    password = decryptions.decrypt(encryptedCredential, cryptoKeyPath, FileSystem.get(new Configuration()));
  }

  _params = TdchParameters.builder()
                          .mrParams(props.getMapByPrefix(TdchConstants.HADOOP_CONFIG_PREFIX_KEY).values())
                          .libJars(createLibJarStr(props))
                          .tdJdbcClassName(TdchConstants.TERADATA_JDBCDRIVER_CLASSNAME)
                          .teradataHostname(props.getString(TdchConstants.TD_HOSTNAME_KEY))
                          .fileFormat(_jobProps.getProperty(TdchConstants.HDFS_FILE_FORMAT_KEY))
                          .fieldSeparator(_jobProps.getProperty(TdchConstants.HDFS_FIELD_SEPARATOR_KEY))
                          .jobType(props.getString(TdchConstants.TDCH_JOB_TYPE, TdchConstants.DEFAULT_TDCH_JOB_TYPE))
                          .userName(props.getString(TdchConstants.TD_USERID_KEY))
                          .credentialName(_jobProps.getProperty(TdchConstants.TD_CREDENTIAL_NAME_KEY))
                          .password(password)
                          .avroSchemaPath(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_PATH_KEY))
                          .avroSchemaInline(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_INLINE_KEY))
                          .sourceHdfsPath(_jobProps.getProperty(TdchConstants.SOURCE_HDFS_PATH_KEY))
                          .targetTdTableName(props.getString(TdchConstants.TARGET_TD_TABLE_NAME_KEY))
                          .errorTdDatabase(_jobProps.getProperty(TdchConstants.ERROR_DB_KEY))
                          .errorTdTableName(_jobProps.getProperty(TdchConstants.ERROR_TABLE_KEY))
                          .tdInsertMethod(_jobProps.getProperty(TdchConstants.TD_INSERT_METHOD_KEY))
                          .numMapper(props.getInt(TdchConstants.TD_NUM_MAPPERS, TdchConstants.DEFAULT_NO_MAPPERS))
                          .hiveSourceDatabase(_jobProps.getProperty(TdchConstants.SOURCE_HIVE_DATABASE_NAME_KEY))
                          .hiveSourceTable(_jobProps.getProperty(TdchConstants.SOURCE_HIVE_TABLE_NAME_KEY))
                          .hiveConfFile(_jobProps.getProperty(TdchConstants.TDCH_HIVE_CONF_KEY))
                          .otherProperties(_jobProps.getProperty(TdchConstants.TD_OTHER_PROPERTIES_HOCON_KEY))
                          .build();
}
 
Example #30
Source File: TeradataToHdfsJobRunnerMain.java    From azkaban-plugins with Apache License 2.0
@VisibleForTesting
TeradataToHdfsJobRunnerMain(Properties jobProps, Decryptions decryptions) throws FileNotFoundException, IOException {
  _logger = JobUtils.initJobLogger();
  _logger.info("Job properties: " + jobProps);

  _jobProps = jobProps;

  String logLevel = jobProps.getProperty(TdchConstants.TDCH_LOG_LEVEL);
  if(!StringUtils.isEmpty(logLevel)) {
    _logger.setLevel(Level.toLevel(logLevel));
  }

  Props props = new Props(null, _jobProps);
  HadoopConfigurationInjector.injectResources(props);
  Configuration conf = new Configuration();
  UserGroupInformation.setConfiguration(conf);

  if (props.containsKey(Whitelist.WHITE_LIST_FILE_PATH_KEY)) {
    new Whitelist(props, FileSystem.get(conf)).validateWhitelisted(props);
  }
  String encryptedCredential = _jobProps.getProperty(TdchConstants.TD_ENCRYPTED_CREDENTIAL_KEY);
  String cryptoKeyPath = _jobProps.getProperty(TdchConstants.TD_CRYPTO_KEY_PATH_KEY);
  String password = null;
  if(encryptedCredential != null && cryptoKeyPath != null) {
    // Use the injected Decryptions instance (kept testable via @VisibleForTesting).
    password = decryptions.decrypt(encryptedCredential, cryptoKeyPath, FileSystem.get(new Configuration()));
  }

  _params = TdchParameters.builder()
                          .mrParams(props.getMapByPrefix(TdchConstants.HADOOP_CONFIG_PREFIX_KEY).values())
                          .libJars(props.getString(TdchConstants.LIB_JARS_KEY))
                          .tdJdbcClassName(TdchConstants.TERADATA_JDBCDRIVER_CLASSNAME)
                          .teradataHostname(props.getString(TdchConstants.TD_HOSTNAME_KEY))
                          .fileFormat(_jobProps.getProperty(TdchConstants.HDFS_FILE_FORMAT_KEY))
                          .fieldSeparator(_jobProps.getProperty(TdchConstants.HDFS_FIELD_SEPARATOR_KEY))
                          .jobType(props.getString(TdchConstants.TDCH_JOB_TYPE, TdchConstants.DEFAULT_TDCH_JOB_TYPE))
                          .userName(props.getString(TdchConstants.TD_USERID_KEY))
                          .credentialName(_jobProps.getProperty(TdchConstants.TD_CREDENTIAL_NAME_KEY))
                          .password(password)
                          .avroSchemaPath(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_PATH_KEY))
                          .avroSchemaInline(_jobProps.getProperty(TdchConstants.AVRO_SCHEMA_INLINE_KEY))
                          .sourceTdTableName(_jobProps.getProperty(TdchConstants.SOURCE_TD_TABLE_NAME_KEY))
                          .sourceQuery(_jobProps.getProperty(TdchConstants.SOURCE_TD_QUERY_NAME_KEY))
                          .targetHdfsPath(props.getString(TdchConstants.TARGET_HDFS_PATH_KEY))
                          .tdRetrieveMethod(_jobProps.getProperty(TdchConstants.TD_RETRIEVE_METHOD_KEY))
                          .numMapper(TdchConstants.DEFAULT_NO_MAPPERS)
                          .build();
}