Java Code Examples for org.apache.hadoop.security.UserGroupInformation.setConfiguration()

The following are Java code examples showing how to use the setConfiguration() method of the org.apache.hadoop.security.UserGroupInformation class. setConfiguration() initializes UserGroupInformation's static state, most importantly the hadoop.security.authentication setting, from a Hadoop Configuration, so it is normally called once before logging in or accessing a FileSystem.
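Most of the examples below follow the same pattern: build a Configuration, enable Kerberos, pass the configuration to UserGroupInformation.setConfiguration(), and then log in from a keytab. The following is a minimal, self-contained sketch of that pattern; the class name, principal, and keytab path are placeholders, not values taken from any of the projects listed here.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class UgiSetupSketch {
  public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // Enable Kerberos so the keytab login below takes effect.
    conf.set("hadoop.security.authentication", "kerberos");
    // Initialize UGI's static state from this configuration; this must happen before login.
    UserGroupInformation.setConfiguration(conf);
    // Placeholder principal and keytab path; replace with values for your cluster.
    UserGroupInformation.loginUserFromKeytab("user@EXAMPLE.COM",
        "/etc/security/keytabs/user.keytab");
    // Subsequent Hadoop calls run as the logged-in user.
    FileSystem fs = FileSystem.get(conf);
    System.out.println(fs.exists(new Path("/")));
  }
}

Several of the test examples below instead reset security to simple authentication by passing a fresh, default Configuration to setConfiguration().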
Example 1
Project: hadoop   File: TestSaslRPC.java
@Before
public void setup() {
  LOG.info("---------------------------------");
  LOG.info("Testing QOP:"+ getQOPNames(qop));
  LOG.info("---------------------------------");
  conf = new Configuration();
  // the specific tests for kerberos will enable kerberos.  forcing it
  // for all tests will cause tests to fail if the user has a TGT
  conf.set(HADOOP_SECURITY_AUTHENTICATION, SIMPLE.toString());
  conf.set(HADOOP_RPC_PROTECTION, getQOPNames(qop));
  if (saslPropertiesResolver != null){
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
      saslPropertiesResolver);
  }
  UserGroupInformation.setConfiguration(conf);
  enableSecretManager = null;
  forceSecretManager = null;
  clientFallBackToSimpleAllowed = true;
}
 
Example 2
Project: hadoop   File: RpcProgramMountd.java
public RpcProgramMountd(NfsConfiguration config,
    DatagramSocket registrationSocket, boolean allowInsecurePorts)
    throws IOException {
  // Note that RPC cache is not enabled
  super("mountd", "localhost", config.getInt(
      NfsConfigKeys.DFS_NFS_MOUNTD_PORT_KEY,
      NfsConfigKeys.DFS_NFS_MOUNTD_PORT_DEFAULT), PROGRAM, VERSION_1,
      VERSION_3, registrationSocket, allowInsecurePorts);
  exports = new ArrayList<String>();
  exports.add(config.get(NfsConfigKeys.DFS_NFS_EXPORT_POINT_KEY,
      NfsConfigKeys.DFS_NFS_EXPORT_POINT_DEFAULT));
  this.hostsMatcher = NfsExports.getInstance(config);
  this.mounts = Collections.synchronizedList(new ArrayList<MountEntry>());
  UserGroupInformation.setConfiguration(config);
  SecurityUtil.login(config, NfsConfigKeys.DFS_NFS_KEYTAB_FILE_KEY,
      NfsConfigKeys.DFS_NFS_KERBEROS_PRINCIPAL_KEY);
  this.dfsClient = new DFSClient(NameNode.getAddress(config), config);
}
 
Example 3
Project: Transwarp-Sample-Code   File: Delete.java
public static void main(String[] args) {
    String rootPath = "hdfs://nameservice1";
    Path p = new Path(rootPath + "/tmp/file.txt");
    Configuration conf = new Configuration();
    conf.addResource("core-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("yarn-site.xml");
    try {
        // If Kerberos is not enabled, comment out the following two lines
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("[email protected]","E:\\星环\\hdfs.keytab");
        FileSystem fs = p.getFileSystem(conf);
        boolean b = fs.delete(p, true);
        System.out.println(b);
        fs.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example 4
Project: hadoop   File: Gridmix.java
public int run(final String[] argv) throws IOException, InterruptedException {
  int val = -1;
  final Configuration conf = getConf();
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation ugi = UserGroupInformation.getLoginUser();

  val = ugi.doAs(new PrivilegedExceptionAction<Integer>() {
    public Integer run() throws Exception {
      return runJob(conf, argv);
    }
  });
  
  // print the gridmix summary if the run was successful
  if (val == 0) {
      // print the run summary
      System.out.print("\n\n");
      System.out.println(summarizer.toString());
  }
  
  return val; 
}
 
Example 5
Project: hadoop-oss   File: RollingFileSystemSink.java
@Override
public void init(SubsetConfiguration metrics2Properties) {
  properties = metrics2Properties;
  basePath = new Path(properties.getString(BASEPATH_KEY, BASEPATH_DEFAULT));
  source = properties.getString(SOURCE_KEY, SOURCE_DEFAULT);
  ignoreError = properties.getBoolean(IGNORE_ERROR_KEY, false);
  allowAppend = properties.getBoolean(ALLOW_APPEND_KEY, false);

  conf = loadConf();
  UserGroupInformation.setConfiguration(conf);

  // Don't do secure setup if it's not needed.
  if (UserGroupInformation.isSecurityEnabled()) {
    // Validate config so that we don't get an NPE
    checkForProperty(properties, KEYTAB_PROPERTY_KEY);
    checkForProperty(properties, USERNAME_PROPERTY_KEY);

    try {
      // Login as whoever we're supposed to be and let the hostname be pulled
      // from localhost. If security isn't enabled, this does nothing.
      SecurityUtil.login(conf, properties.getString(KEYTAB_PROPERTY_KEY),
          properties.getString(USERNAME_PROPERTY_KEY));
    } catch (IOException ex) {
      throw new MetricsException("Error logging in securely: ["
          + ex.toString() + "]", ex);
    }
  }
}
 
Example 6
Project: hadoop-oss   File: TestWebDelegationToken.java
@After
public void cleanUp() throws Exception {
  jetty.stop();

  // resetting hadoop security to simple
  org.apache.hadoop.conf.Configuration conf =
      new org.apache.hadoop.conf.Configuration();
  UserGroupInformation.setConfiguration(conf);
}
 
Example 7
Project: hadoop   File: TestWebDelegationToken.java
@After
public void cleanUp() throws Exception {
  jetty.stop();

  // resetting hadoop security to simple
  org.apache.hadoop.conf.Configuration conf =
      new org.apache.hadoop.conf.Configuration();
  UserGroupInformation.setConfiguration(conf);
}
 
Example 8
Project: hadoop   File: TestRMRestart.java
@Before
public void setup() throws IOException {
  conf = getConf();
  Logger rootLogger = LogManager.getRootLogger();
  rootLogger.setLevel(Level.DEBUG);
  UserGroupInformation.setConfiguration(conf);
  conf.setBoolean(YarnConfiguration.RECOVERY_ENABLED, true);
  conf.setBoolean(YarnConfiguration.RM_WORK_PRESERVING_RECOVERY_ENABLED, false);
  conf.set(YarnConfiguration.RM_STORE, MemoryRMStateStore.class.getName());
  rmAddr = new InetSocketAddress("localhost", 8032);
  Assert.assertTrue(YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS > 1);
}
 
Example 9
Project: hadoop   File: TestWebHdfsTokens.java
@BeforeClass
public static void setUp() {
  conf = new Configuration();
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);    
  UserGroupInformation.setLoginUser(
      UserGroupInformation.createUserForTesting(
          "LoginUser", new String[]{"supergroup"}));
}
 
Example 10
Project: hadoop   File: TestContainerResourceUsage.java
@Before
public void setup() throws UnknownHostException {
  Logger rootLogger = LogManager.getRootLogger();
  rootLogger.setLevel(Level.DEBUG);
  conf = new YarnConfiguration();
  UserGroupInformation.setConfiguration(conf);
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
      YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
}
 
Example 11
Project: hadoop   File: TestRMRestart.java
@Test (timeout = 60000)
public void testAppSubmissionWithOldDelegationTokenAfterRMRestart()
    throws Exception {
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  conf.set(YarnConfiguration.RM_ADDRESS, "localhost:8032");
  UserGroupInformation.setConfiguration(conf);
  MemoryRMStateStore memStore = new MemoryRMStateStore();
  memStore.init(conf);

  MockRM rm1 = new TestSecurityMockRM(conf, memStore);
  rm1.start();

  GetDelegationTokenRequest request1 =
      GetDelegationTokenRequest.newInstance("renewer1");
  UserGroupInformation.getCurrentUser().setAuthenticationMethod(
      AuthMethod.KERBEROS);
  GetDelegationTokenResponse response1 =
      rm1.getClientRMService().getDelegationToken(request1);
  Token<RMDelegationTokenIdentifier> token1 =
      ConverterUtils.convertFromYarn(response1.getRMDelegationToken(), rmAddr);

  // start new RM
  MockRM rm2 = new TestSecurityMockRM(conf, memStore);
  rm2.start();

  // submit an app with the old delegation token got from previous RM.
  Credentials ts = new Credentials();
  ts.addToken(token1.getService(), token1);
  RMApp app = rm2.submitApp(200, "name", "user",
      new HashMap<ApplicationAccessType, String>(), false, "default", 1, ts);
  rm2.waitForState(app.getApplicationId(), RMAppState.ACCEPTED);
}
 
Example 12
Project: monarch   File: HDFSQuasiService.java
public void createSecuredUserDir(String userName, String keytabdir) {
  try {
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(getHDFSPrincipal(""),
        keytabdir + File.separator + "hdfs.keytab");
    FileSystem fs = FileSystem.get(conf);
    Path userDir = new Path("/user" + File.separator + userName);
    fs.mkdirs(userDir, new FsPermission(FsAction.ALL, FsPermission.getDefault().getGroupAction(),
        FsPermission.getDefault().getOtherAction()));
    fs.setOwner(userDir, userName, "hadoop");
  } catch (IOException e) {
    e.printStackTrace();
  }

}
 
Example 13
Project: ditb   File: TestSecureRPC.java
private UserGroupInformation loginKerberosPrincipal(String krbKeytab, String krbPrincipal)
    throws Exception {
  Configuration cnf = new Configuration();
  cnf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(cnf);
  UserGroupInformation.loginUserFromKeytab(krbPrincipal, krbKeytab);
  return UserGroupInformation.getLoginUser();
}
 
Example 14
Project: monarch   File: ConfigurationUtils.java
/**
 * Creates the hadoop configuration object from the properties specified for tierstore
 * 
 * @return configuration object
 */
public static Configuration getConfiguration(final Properties props) throws IOException {
  Configuration conf = new Configuration();
  String hdfsSiteXMLPath = props.getProperty(CommonConfig.HDFS_SITE_XML_PATH);
  String hadoopSiteXMLPath = props.getProperty(CommonConfig.HADOOP_SITE_XML_PATH);
  if (hdfsSiteXMLPath != null) {
    conf.addResource(Paths.get(hdfsSiteXMLPath).toUri().toURL());
  }
  if (hadoopSiteXMLPath != null) {
    conf.addResource(Paths.get(hadoopSiteXMLPath).toUri().toURL());
  }

  props.entrySet().forEach((PROP) -> {
    conf.set(String.valueOf(PROP.getKey()), String.valueOf(PROP.getValue()));
  });

  // set secured properties
  String userName = props.getProperty(CommonConfig.USER_NAME);
  String keytabPath = props.getProperty(CommonConfig.KEYTAB_PATH);
  if (userName == null || keytabPath == null) {
    if (props.containsKey(ENABLE_KERBEROS_AUTHC)
        && Boolean.parseBoolean(props.getProperty(ENABLE_KERBEROS_AUTHC))) {
      userName = props.getProperty(ResourceConstants.USER_NAME);
      keytabPath = props.getProperty(ResourceConstants.PASSWORD);
    }
  }

  // use the username and keytab
  if (userName != null && keytabPath != null) {
    // set kerberos authentication
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(userName, keytabPath);
  }
  return conf;
}
 
Example 15
Project: wherehowsX   File: SchemaFetch.java
public SchemaFetch(Configuration conf)
  throws IOException, InterruptedException {
  logger = LoggerFactory.getLogger(getClass());
  this.conf = conf;

  schemaFileWriter = new FileWriter(this.conf.get(Constant.HDFS_SCHEMA_REMOTE_PATH_KEY));
  sampleFileWriter = new FileWriter(this.conf.get(Constant.HDFS_SAMPLE_REMOTE_PATH_KEY));

  // login from kerberos, get the file system
  String principal = this.conf.get(Constant.HDFS_REMOTE_USER_KEY);
  String keyLocation = this.conf.get(Constant.HDFS_REMOTE_KEYTAB_LOCATION_KEY, null);

  if (keyLocation == null) {
    System.out.println("No keytab file location specified, will ignore the kerberos login process");
    fs = FileSystem.get(new Configuration());
  } else {
    try {
      Configuration hdfs_conf = new Configuration();
      hdfs_conf.set("hadoop.security.authentication", "Kerberos");
      hdfs_conf.set("dfs.namenode.kerberos.principal.pattern", "*");
      UserGroupInformation.setConfiguration(hdfs_conf);
      UserGroupInformation.loginUserFromKeytab(principal, keyLocation);
      fs = FileSystem.get(hdfs_conf);
    } catch (IOException e) {
      System.out
          .println("Failed to log in through Kerberos. Principal: " + principal + ", keytab location: " + keyLocation);
      e.printStackTrace();
      System.out.println("Falling back to the default configuration; assuming Kerberos is not needed");
      fs = FileSystem.get(new Configuration());
    }
  }

  // TODO Write to hdfs
  // String sampleDataFolder = "/projects/wherehows/hdfs/sample_data";
  // String cluster = this.conf.get("hdfs.cluster");
  // sampleDataAvroWriter = new AvroWriter(this.fs, sampleDataFolder + "/" + cluster, SampleDataRecord.class);
  // String schemaFolder = this.conf.get("hdfs.schema_location");

  fileAnalyzerFactory = new FileAnalyzerFactory(this.fs);
}
 
Example 16
Project: hadoop   File: TestRMRestart.java
@Test (timeout = 60000)
public void testDelegationTokenRestoredInDelegationTokenRenewer()
    throws Exception {
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 2);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
      "kerberos");
  UserGroupInformation.setConfiguration(conf);

  MemoryRMStateStore memStore = new MemoryRMStateStore();
  memStore.init(conf);
  RMState rmState = memStore.getState();

  Map<ApplicationId, ApplicationStateData> rmAppState =
      rmState.getApplicationState();
  MockRM rm1 = new TestSecurityMockRM(conf, memStore);
  rm1.start();

  HashSet<Token<RMDelegationTokenIdentifier>> tokenSet =
      new HashSet<Token<RMDelegationTokenIdentifier>>();

  // create an empty credential
  Credentials ts = new Credentials();

  // create tokens and add into credential
  Text userText1 = new Text("user1");
  RMDelegationTokenIdentifier dtId1 =
      new RMDelegationTokenIdentifier(userText1, new Text("renewer1"),
        userText1);
  Token<RMDelegationTokenIdentifier> token1 =
      new Token<RMDelegationTokenIdentifier>(dtId1,
        rm1.getRMContext().getRMDelegationTokenSecretManager());
  SecurityUtil.setTokenService(token1, rmAddr);
  ts.addToken(userText1, token1);
  tokenSet.add(token1);

  Text userText2 = new Text("user2");
  RMDelegationTokenIdentifier dtId2 =
      new RMDelegationTokenIdentifier(userText2, new Text("renewer2"),
        userText2);
  Token<RMDelegationTokenIdentifier> token2 =
      new Token<RMDelegationTokenIdentifier>(dtId2,
        rm1.getRMContext().getRMDelegationTokenSecretManager());
  SecurityUtil.setTokenService(token2, rmAddr);
  ts.addToken(userText2, token2);
  tokenSet.add(token2);

  // submit an app with customized credential
  RMApp app = rm1.submitApp(200, "name", "user",
      new HashMap<ApplicationAccessType, String>(), false, "default", 1, ts);

  // assert app info is saved
  ApplicationStateData appState = rmAppState.get(app.getApplicationId());
  Assert.assertNotNull(appState);

  // assert delegation tokens exist in rm1 DelegationTokenRenewr
  Assert.assertEquals(tokenSet, rm1.getRMContext()
    .getDelegationTokenRenewer().getDelegationTokens());

  // assert delegation tokens are saved
  DataOutputBuffer dob = new DataOutputBuffer();
  ts.writeTokenStorageToStream(dob);
  ByteBuffer securityTokens =
      ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  securityTokens.rewind();
  Assert.assertEquals(securityTokens, appState
    .getApplicationSubmissionContext().getAMContainerSpec()
    .getTokens());

  // start new RM
  MockRM rm2 = new TestSecurityMockRM(conf, memStore);
  rm2.start();

  // Need to wait for a while as now token renewal happens on another thread
  // and is asynchronous in nature.
  waitForTokensToBeRenewed(rm2);

  // verify tokens are properly populated back to rm2 DelegationTokenRenewer
  Assert.assertEquals(tokenSet, rm2.getRMContext()
    .getDelegationTokenRenewer().getDelegationTokens());
}
 
Example 17
Project: hadoop-oss   File: TestKMS.java
@Before
public void cleanUp() {
  // resetting kerberos security
  Configuration conf = new Configuration();
  UserGroupInformation.setConfiguration(conf);
}
 
Example 18
Project: hadoop   File: TestKMS.java
@Before
public void cleanUp() {
  // resetting kerberos security
  Configuration conf = new Configuration();
  UserGroupInformation.setConfiguration(conf);
}
 
Example 19
Project: hadoop   File: TestRMWebappAuthentication.java
private static void setupAndStartRM(Configuration conf) {
  UserGroupInformation.setConfiguration(conf);
  rm = new MockRM(conf);
}
 
Example 20
Project: hadoop   File: AbstractSecureRegistryTest.java
/**
 * Init hadoop security by setting up the UGI config
 */
public static void initHadoopSecurity() {

  UserGroupInformation.setConfiguration(CONF);

  KerberosName.setRules(kerberosRule);
}