Java Code Examples for org.apache.hadoop.security.UserGroupInformation.loginUserFromKeytab()

The following are Java code examples that show how to use the loginUserFromKeytab() method of the org.apache.hadoop.security.UserGroupInformation class.
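Before the per-project examples, here is a minimal, self-contained sketch of the typical pattern: configure UserGroupInformation, then log in from a keytab before touching HDFS. The principal user@EXAMPLE.COM, the keytab path, and the explicit hadoop.security.authentication setting below are illustrative placeholders, not values taken from any of the projects listed.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

public class KeytabLoginSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // Kerberos is normally enabled via core-site.xml; it is set explicitly
        // here only so the sketch is self-contained (illustrative value).
        conf.set("hadoop.security.authentication", "kerberos");

        // Point UGI at this configuration, then authenticate from the keytab.
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("user@EXAMPLE.COM",
                "/etc/security/keytabs/user.keytab");

        // Subsequent Hadoop filesystem calls run as the logged-in principal.
        FileSystem fs = FileSystem.get(conf);
        System.out.println(fs.exists(new Path("/tmp")));
    }
}

The same two calls (setConfiguration followed by loginUserFromKeytab) appear in almost every example below; what varies is where the principal and keytab path come from: hard-coded strings, a configuration object, or a properties file.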
Example 1
Project: Transwarp-Sample-Code   File: UploadFile.java
public static void main(String[] args) throws IOException {
    Configuration conf = new Configuration();
    conf.addResource("core-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("yarn-site.xml");
    // If Kerberos is not enabled, comment out the following two lines
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab("[email protected]","E:\\星环\\任务\\2016年11月28日\\hdfs.keytab");
    String localFile = "E:\\星环\\yarn-site.xml";
    InputStream in = new BufferedInputStream(new FileInputStream(localFile));
    Path p = new Path( "/tmp/yarn-site.xml");
    FileSystem fs = p.getFileSystem(conf);
    OutputStream out = fs.create(p);
    IOUtils.copyBytes(in, out, conf);
    fs.close();
    IOUtils.closeStream(in);
}
 
Example 2
Project: monarch   File: MTableCDCHDFSListener.java
public FSDataOutputStream getFileStream() throws IOException {
  Configuration conf = new Configuration();
  // Add configurations
  conf.addResource(new Path("file:///opt/conf/core-site.xml"));
  conf.addResource(new Path("file:///opt/conf/hdfs-site.xml"));

  // set config to UGI
  UserGroupInformation.setConfiguration(conf);
  // provide the user principal and keytab to authenticate using Kerberos
  UserGroupInformation.loginUserFromKeytab("[email protected]", "/opt/ampool.keytab");

  FileSystem fs = FileSystem.get(conf);
  Path file = new Path("/tmp/data/EmployeeData" + fileIndex++ + ".csv");

  if (fs.exists(file)) {
    fs.delete(file, false);
  }
  FSDataOutputStream fsDataOutputStream = fs.create(file);
  return fsDataOutputStream;
}
 
Example 3
Project: hadoop-oss   File: TestKMS.java
private <T> T doAs(String user, final PrivilegedExceptionAction<T> action)
    throws Exception {
  UserGroupInformation.loginUserFromKeytab(user, keytab.getAbsolutePath());
  UserGroupInformation ugi = UserGroupInformation.getLoginUser();
  try {
    return ugi.doAs(action);
  } finally {
    ugi.logoutUserFromKeytab();
  }
}
 
Example 4
Project: Transwarp-Sample-Code   File: Delete.java
public static void main(String[] args) {
    String rootPath = "hdfs://nameservice1";
    Path p = new Path(rootPath + "/tmp/file.txt");
    Configuration conf = new Configuration();
    conf.addResource("core-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("yarn-site.xml");
    try {
        // If Kerberos is not enabled, comment out the following two lines
        UserGroupInformation.setConfiguration(conf);
        UserGroupInformation.loginUserFromKeytab("[email protected]","E:\\星环\\hdfs.keytab");
        FileSystem fs = p.getFileSystem(conf);
        boolean b = fs.delete(p, true);
        System.out.println(b);
        fs.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
 
Example 5
Project: Transwarp-Sample-Code   File: CreateDir.java
public static void main(String[] args) throws IOException {
    // Create an HDFS directory via the Java API
    String rootPath = "hdfs://nameservice1";
    Path p = new Path(rootPath + "/tmp/newDir3");

    Configuration conf = new Configuration();
    conf.addResource("core-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("yarn-site.xml");
    // If Kerberos is not enabled, comment out the following two lines
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab("[email protected]","E:\\星环\\hdfs.keytab");
    FileSystem fs = p.getFileSystem(conf);
    boolean b = fs.mkdirs(p);
    System.out.println(b);
    fs.close();
}
 
Example 6
Project: easyhbase   File: HbaseConfigurationFactoryBean.java
public void afterPropertiesSet() {
    this.configuration = (this.hadoopConfig != null ? HBaseConfiguration.create(this
            .hadoopConfig) : HBaseConfiguration.create());
    addProperties(this.configuration, this.properties);
    if (("kerberos".equalsIgnoreCase(this.configuration.get(HBASE_SECURITY_AUTHENTICATION)))) {
        UserGroupInformation.setConfiguration(this.configuration);
        try {
            UserGroupInformation.loginUserFromKeytab(this.configuration.get(PRINCIPAL), this
                    .configuration.get(KEYTAB_PATH));
            LOG.info(" user " + this.configuration.get(PRINCIPAL) + " login successfully, " +
                    "keytab: " + this
                    .configuration.get(KEYTAB_PATH));
        } catch (IOException e) {
            LOG.info(" user " + PRINCIPAL + " login failed");
            throw new RuntimeException(e);
        }
    }
}
 
Example 7
Project: hadoop-oss   File: MiniRPCBenchmark.java
/** Start RPC server */
MiniServer(Configuration conf, String user, String keytabFile)
throws IOException {
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation.loginUserFromKeytab(user, keytabFile);
  // key update interval, max token lifetime, renew interval, remover scan interval (ms)
  secretManager =
    new TestDelegationTokenSecretManager(24*60*60*1000,
        7*24*60*60*1000, 24*60*60*1000, 3600000);
  secretManager.startThreads();
  rpcServer = new RPC.Builder(conf).setProtocol(MiniProtocol.class)
      .setInstance(this).setBindAddress(DEFAULT_SERVER_ADDRESS).setPort(0)
      .setNumHandlers(1).setVerbose(false).setSecretManager(secretManager)
      .build();
  rpcServer.start();
}
 
Example 8
Project: Transwarp-Sample-Code   File: CreateFile.java
public static void main(String[] args) throws IOException {
    // Create a file on HDFS via the Java API
    String rootPath = "hdfs://nameservice1";
    Path p = new Path(rootPath + "/tmp/file.txt");
    Configuration conf = new Configuration();
    conf.addResource("core-site.xml");
    conf.addResource("hdfs-site.xml");
    conf.addResource("yarn-site.xml");
    // If Kerberos is not enabled, comment out the following two lines
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab("[email protected]","E:\\星环\\hdfs.keytab");
    FileSystem fs = p.getFileSystem(conf);
    fs.create(p);
    fs.close();
}
 
Example 9
Project: Transwarp-Sample-Code   File: Connector.java
public Connector() {
    Configuration HBASE_CONFIG = new Configuration();
    Constant constant = new Constant();
    HBASE_CONFIG.addResource("hbase-site.xml");
    HBASE_CONFIG.addResource("core-site.xml");
    HBASE_CONFIG.addResource("hdfs-site.xml");
    configuration = HBaseConfiguration.create(HBASE_CONFIG);

    try {
        if (constant.MODE.equals("kerberos")) {
            UserGroupInformation.setConfiguration(configuration);
            // UserGroupInformation.loginUserFromPassword(constant.KERBEROS_USER, constant.KERBEROS_PASSWD);
            UserGroupInformation.loginUserFromKeytab(constant.KERBEROS_WITH_KEYTAB_USER, constant.KERBEROS_KEYTAB);
        }
        hBaseAdmin = new HBaseAdmin(configuration);
        hyperbaseAdmin = new HyperbaseAdmin(configuration);

        Configuration hdfsConf = getHDFSConf();
        String rootPath = "hdfs://nameservice1";
        Path p = new Path(rootPath + constant.HDFS_LARGE_FILE_DIR);
        FileSystem fs = p.getFileSystem(hdfsConf);
        boolean b = fs.mkdirs(p);
        System.out.println(b);
        fs.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
Example 10
Project: monarch   File: ConfigurationUtils.java
/**
 * Creates the Hadoop configuration object from the properties specified for the tier store.
 * 
 * @return configuration object
 */
public static Configuration getConfiguration(final Properties props) throws IOException {
  Configuration conf = new Configuration();
  String hdfsSiteXMLPath = props.getProperty(CommonConfig.HDFS_SITE_XML_PATH);
  String hadoopSiteXMLPath = props.getProperty(CommonConfig.HADOOP_SITE_XML_PATH);
  if (hdfsSiteXMLPath != null) {
    conf.addResource(Paths.get(hdfsSiteXMLPath).toUri().toURL());
  }
  if (hadoopSiteXMLPath != null) {
    conf.addResource(Paths.get(hadoopSiteXMLPath).toUri().toURL());
  }

  props.entrySet().forEach((PROP) -> {
    conf.set(String.valueOf(PROP.getKey()), String.valueOf(PROP.getValue()));
  });

  // set secured properties
  String userName = props.getProperty(CommonConfig.USER_NAME);
  String keytabPath = props.getProperty(CommonConfig.KEYTAB_PATH);
  if (userName == null || keytabPath == null) {
    if (props.containsKey(ENABLE_KERBEROS_AUTHC)
        && Boolean.parseBoolean(props.getProperty(ENABLE_KERBEROS_AUTHC))) {
      userName = props.getProperty(ResourceConstants.USER_NAME);
      keytabPath = props.getProperty(ResourceConstants.PASSWORD);
    }
  }

  // use the username and keytab
  if (userName != null && keytabPath != null) {
    // set kerberos authentication
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(userName, keytabPath);
  }
  return conf;
}
 
Example 11
Project: monarch   File: HDFSQuasiService.java
public void createSecuredUserDir(String userName, String keytabdir) {
  try {
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(getHDFSPrincipal(""),
        keytabdir + File.separator + "hdfs.keytab");
    FileSystem fs = FileSystem.get(conf);
    Path userDir = new Path("/user" + File.separator + userName);
    fs.mkdirs(userDir, new FsPermission(FsAction.ALL, FsPermission.getDefault().getGroupAction(),
        FsPermission.getDefault().getOtherAction()));
    fs.setOwner(userDir, userName, "hadoop");
  } catch (IOException e) {
    e.printStackTrace();
  }

}
 
Example 12
Project: monarch   File: HDFSQuasiService.java
public boolean checkFileExistsSecured(final String user, final String keytab, String storeBaseDir,
    String tableName) {
  try {
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(user, keytab);
    FileSystem fs = FileSystem.get(conf);
    Path storeBasePath = new Path(fs.getHomeDirectory(), storeBaseDir);
    Path tablePath = new Path(storeBasePath, tableName);
    return fs.exists(tablePath);
  } catch (IOException e) {
    e.printStackTrace();
  }
  return false;
}
 
Example 13
Project: monarch   File: HDFSClient.java
public static void main(String[] args) throws IOException {
  System.setProperty("java.security.krb5.conf", args[0]);
  Configuration conf = new Configuration();
  conf.addResource(new FileInputStream("/tmp/testCluster/hdfs.xml"));
  System.out.println("default fs :- " + conf.get("fs.defaultFS"));
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation.loginUserFromKeytab("hdfs/[email protected]",
      "/tmp/testCluster/kdc/keytabs/hdfs.keytab");

  FileSystem fs = FileSystem.get(conf);

  // Path path = new Path("/hello.txt");
  // FSDataOutputStream fin = fs.create(path);
  // fin.writeUTF("hello");
  // fin.close();
  //
  Path hbaseDir = new Path("/user");
  fs.mkdirs(hbaseDir);
  fs.setOwner(hbaseDir, "hdfs", "supergroup");


  /*
   * RemoteIterator<LocatedFileStatus> fileList = listHDFSFiles(fs, new Path("/user"));
   * System.out.println(fileList.hasNext()); if(fileList!=null){ while (fileList.hasNext()){
   * LocatedFileStatus file = fileList.next();
   * System.out.println("--------------------------------------------------------");
   * System.out.println(file.getPath()); System.out.println(file.getPermission());
   * System.out.println(file.getOwner());
   * System.out.println("--------------------------------------------------------"); } }
   */

  FileStatus[] stats = fs.listStatus(new Path("/"));
  for (FileStatus fst : stats) {
    System.out.println(fst.getPath());
  }
}
 
Example 14
Project: hadoop   File: MiniRPCBenchmark.java
/** Start RPC server */
MiniServer(Configuration conf, String user, String keytabFile)
throws IOException {
  UserGroupInformation.setConfiguration(conf);
  UserGroupInformation.loginUserFromKeytab(user, keytabFile);
  // key update interval, max token lifetime, renew interval, remover scan interval (ms)
  secretManager =
    new TestDelegationTokenSecretManager(24*60*60*1000,
        7*24*60*60*1000, 24*60*60*1000, 3600000);
  secretManager.startThreads();
  rpcServer = new RPC.Builder(conf).setProtocol(MiniProtocol.class)
      .setInstance(this).setBindAddress(DEFAULT_SERVER_ADDRESS).setPort(0)
      .setNumHandlers(1).setVerbose(false).setSecretManager(secretManager)
      .build();
  rpcServer.start();
}
 
Example 15
Project: dremio-oss   File: DACDaemon.java
/**
 * Set up the current user in {@link UserGroupInformation} using the Kerberos principal and keytab file path, if
 * present in the config. If not present, this method call is a no-op. When communicating with a Kerberos-enabled,
 * Hadoop-based filesystem, the credentials in {@link UserGroupInformation} will be used.
 * @param config
 * @throws IOException
 */
private void setupHadoopUserUsingKerberosKeytab(final DremioConfig config) throws IOException {
  final String kerberosPrincipal = config.getString(DremioConfig.KERBEROS_PRINCIPAL);
  final String kerberosKeytab = config.getString(DremioConfig.KERBEROS_KEYTAB_PATH);

  if (Strings.isNullOrEmpty(kerberosPrincipal) || Strings.isNullOrEmpty(kerberosKeytab)) {
    return;
  }

  UserGroupInformation.loginUserFromKeytab(kerberosPrincipal, kerberosKeytab);

  logger.info("Setup Hadoop user info using kerberos principal {} and keytab file {} successful.",
      kerberosPrincipal, kerberosKeytab);
}
 
Example 16
Project: ditb   File: TestSecureRPC.java
private UserGroupInformation loginKerberosPrincipal(String krbKeytab, String krbPrincipal)
    throws Exception {
  Configuration cnf = new Configuration();
  cnf.set(CommonConfigurationKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(cnf);
  UserGroupInformation.loginUserFromKeytab(krbPrincipal, krbKeytab);
  return UserGroupInformation.getLoginUser();
}
 
Example 17
Project: wherehowsX   File: SchemaFetch.java
public SchemaFetch(Configuration conf)
  throws IOException, InterruptedException {
  logger = LoggerFactory.getLogger(getClass());
  this.conf = conf;

  schemaFileWriter = new FileWriter(this.conf.get(Constant.HDFS_SCHEMA_REMOTE_PATH_KEY));
  sampleFileWriter = new FileWriter(this.conf.get(Constant.HDFS_SAMPLE_REMOTE_PATH_KEY));

  // log in via Kerberos, then get the file system
  String principal = this.conf.get(Constant.HDFS_REMOTE_USER_KEY);
  String keyLocation = this.conf.get(Constant.HDFS_REMOTE_KEYTAB_LOCATION_KEY, null);


  if (keyLocation == null) {
    System.out.println("No keytab file location specified, will ignore the kerberos login process");
    fs = FileSystem.get(new Configuration());
  } else {
    try {
      Configuration hdfs_conf = new Configuration();
      hdfs_conf.set("hadoop.security.authentication", "Kerberos");
      hdfs_conf.set("dfs.namenode.kerberos.principal.pattern", "*");
      UserGroupInformation.setConfiguration(hdfs_conf);
      UserGroupInformation.loginUserFromKeytab(principal, keyLocation);
      fs = FileSystem.get(hdfs_conf);
    } catch (IOException e) {
      System.out
          .println("Failed to log in through Kerberos. Principal: " + principal + ", keytab location: " + keyLocation);
      e.printStackTrace();
      System.out.println("Falling back to the default configuration, assuming no Kerberos is needed");
      fs = FileSystem.get(new Configuration());
    }
  }

  // TODO Write to hdfs
  // String sampleDataFolder = "/projects/wherehows/hdfs/sample_data";
  // String cluster = this.conf.get("hdfs.cluster");
  // sampleDataAvroWriter = new AvroWriter(this.fs, sampleDataFolder + "/" + cluster, SampleDataRecord.class);
  // String schemaFolder = this.conf.get("hdfs.schema_location");

  fileAnalyzerFactory = new FileAnalyzerFactory(this.fs);
}