Java Code Examples for org.apache.hadoop.security.UserGroupInformation.setLoginUser()

The following are Java code examples showing how to use setLoginUser() of the org.apache.hadoop.security.UserGroupInformation class.
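setLoginUser() overrides the login user that UserGroupInformation.getLoginUser() returns, which is why it appears almost exclusively in test setup: a test installs a synthetic user before exercising code that reads the login or current user. The sketch below is a minimal illustration of that pattern, assuming JUnit 4; the class name, user name, and group are hypothetical and not taken from the examples that follow.

import org.apache.hadoop.security.UserGroupInformation;
import org.junit.After;
import org.junit.Before;

public class LoginUserExampleTest {

  @Before
  public void installFakeLoginUser() {
    // Install a synthetic user as the login user; "test-user" and
    // "test-group" are illustrative names only.
    UserGroupInformation testUser = UserGroupInformation.createUserForTesting(
        "test-user", new String[]{"test-group"});
    UserGroupInformation.setLoginUser(testUser);
  }

  @After
  public void resetLoginUser() {
    // Clear the overridden login user so later tests start from a clean state
    // (Example 6 below does the same in its @Before method).
    UserGroupInformation.setLoginUser(null);
  }
}
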
Example 1
Project: hadoop   File: TestApplicationMasterServiceProtocolOnHA.java
@Before
public void initialize() throws Exception {
  startHACluster(0, false, false, true);
  attemptId = this.cluster.createFakeApplicationAttemptId();
  amClient = ClientRMProxy
      .createRMProxy(this.conf, ApplicationMasterProtocol.class);

  Token<AMRMTokenIdentifier> appToken =
      this.cluster.getResourceManager().getRMContext()
        .getAMRMTokenSecretManager().createAndGetAMRMToken(attemptId);
  appToken.setService(ClientRMProxy.getAMRMTokenService(conf));
  UserGroupInformation.setLoginUser(UserGroupInformation
      .createRemoteUser(UserGroupInformation.getCurrentUser()
          .getUserName()));
  UserGroupInformation.getCurrentUser().addToken(appToken);
  syncToken(appToken);
}
 
Example 2
Project: hadoop   File: TestDFSClientCache.java
@Test
public void testGetUserGroupInformationSecure() throws IOException {
  String userName = "user1";
  String currentUser = "test-user";

  NfsConfiguration conf = new NfsConfiguration();
  UserGroupInformation currentUserUgi
          = UserGroupInformation.createRemoteUser(currentUser);
  currentUserUgi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(currentUserUgi);

  DFSClientCache cache = new DFSClientCache(conf);
  UserGroupInformation ugiResult
          = cache.getUserGroupInformation(userName, currentUserUgi);

  assertThat(ugiResult.getUserName(), is(userName));
  assertThat(ugiResult.getRealUser(), is(currentUserUgi));
  assertThat(
          ugiResult.getAuthenticationMethod(),
          is(UserGroupInformation.AuthenticationMethod.PROXY));
}
 
Example 3
Project: hadoop   File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testSimpleAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");

  // send user+token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);
}
 
Example 4
Project: hadoop   File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testSimpleProxyAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  ugi = UserGroupInformation.createProxyUser("test-proxy-user", ugi);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");

  // send real+effective
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getRealUser().getShortUserName()).toString(),
          new DoAsParam(ugi.getShortUserName()).toString()
  },
      fileStatusUrl);
}
 
Example 5
Project: hadoop   File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testCheckAccessUrl() throws IOException {
  Configuration conf = new Configuration();

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/p1");

  URL checkAccessUrl = webhdfs.toUrl(GetOpParam.Op.CHECKACCESS,
      fsPath, new FsActionParam(FsAction.READ_WRITE));
  checkQueryParams(
      new String[]{
          GetOpParam.Op.CHECKACCESS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          FsActionParam.NAME + "=" + FsAction.READ_WRITE.SYMBOL
      },
      checkAccessUrl);
}
 
Example 6
Project: hadoop   File: TestRMContainerAllocator.java
@Before
public void setup() {
  MyContainerAllocator.getJobUpdatedNodeEvents().clear();
  MyContainerAllocator.getTaskAttemptKillEvents().clear();

  // make each test create a fresh user to avoid leaking tokens between tests
  UserGroupInformation.setLoginUser(null);
}
 
Example 7
Project: hadoop   File: TestGlobPaths.java
@BeforeClass
public static void setUp() throws Exception {
  final Configuration conf = new HdfsConfiguration();
  dfsCluster = new MiniDFSCluster.Builder(conf).build();

  privilegedFs = FileSystem.get(conf);
  privilegedFc = FileContext.getFileContext(conf);
  // allow the unprivileged user to create paths
  privilegedFs.setPermission(new Path("/"),
                             FsPermission.createImmutable((short)0777));
  UserGroupInformation.setLoginUser(unprivilegedUser);
  fs = FileSystem.get(conf);
  fc = FileContext.getFileContext(conf);
  USER_DIR = fs.getHomeDirectory().toUri().getPath().toString();
}
 
Example 8
Project: hadoop   File: TestWebHdfsTokens.java
@BeforeClass
public static void setUp() {
  conf = new Configuration();
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);    
  UserGroupInformation.setLoginUser(
      UserGroupInformation.createUserForTesting(
          "LoginUser", new String[]{"supergroup"}));
}
 
Example 9
Project: mapr-music   File: MaprMusicElasticSearchService.java
private static void loginTestUser(String username, String group) {
    UserGroupInformation currentUgi = UserGroupInformation.createUserForTesting(username, new String[]{group});
    UserGroupInformation.setLoginUser(currentUgi);
}
 
Example 10
Project: mapr-music   File: MaprDbDao.java
private static void loginTestUser(String username, String group) {
    UserGroupInformation currentUgi = UserGroupInformation.createUserForTesting(username, new String[]{group});
    UserGroupInformation.setLoginUser(currentUgi);
}
 
Example 11
Project: mapr-music   File: ArtistsChangelogListenerService.java
private static void loginTestUser(String username, String group) {
    UserGroupInformation currentUgi = UserGroupInformation.createUserForTesting(username, new String[]{group});
    UserGroupInformation.setLoginUser(currentUgi);
}
 
Example 12
Project: mapr-music   File: CdcStatisticService.java
private static void loginTestUser(String username, String group) {
    UserGroupInformation currentUgi = UserGroupInformation.createUserForTesting(username, new String[]{group});
    UserGroupInformation.setLoginUser(currentUgi);
}
 
Example 13
Project: hadoop   File: TestAMRMClient.java
@Before
public void startApp() throws Exception {
  // submit new app
  ApplicationSubmissionContext appContext = 
      yarnClient.createApplication().getApplicationSubmissionContext();
  ApplicationId appId = appContext.getApplicationId();
  // set the application name
  appContext.setApplicationName("Test");
  // Set the priority for the application master
  Priority pri = Records.newRecord(Priority.class);
  pri.setPriority(0);
  appContext.setPriority(pri);
  // Set the queue to which this application is to be submitted in the RM
  appContext.setQueue("default");
  // Set up the container launch context for the application master
  ContainerLaunchContext amContainer =
      BuilderUtils.newContainerLaunchContext(
        Collections.<String, LocalResource> emptyMap(),
        new HashMap<String, String>(), Arrays.asList("sleep", "100"),
        new HashMap<String, ByteBuffer>(), null,
        new HashMap<ApplicationAccessType, String>());
  appContext.setAMContainerSpec(amContainer);
  appContext.setResource(Resource.newInstance(1024, 1, 1));
  // Create the request to send to the applications manager
  SubmitApplicationRequest appRequest = Records
      .newRecord(SubmitApplicationRequest.class);
  appRequest.setApplicationSubmissionContext(appContext);
  // Submit the application to the applications manager
  yarnClient.submitApplication(appContext);

  // wait for app to start
  RMAppAttempt appAttempt = null;
  while (true) {
    ApplicationReport appReport = yarnClient.getApplicationReport(appId);
    if (appReport.getYarnApplicationState() == YarnApplicationState.ACCEPTED) {
      attemptId = appReport.getCurrentApplicationAttemptId();
      appAttempt =
          yarnCluster.getResourceManager().getRMContext().getRMApps()
            .get(attemptId.getApplicationId()).getCurrentAppAttempt();
      while (true) {
        if (appAttempt.getAppAttemptState() == RMAppAttemptState.LAUNCHED) {
          break;
        }
      }
      break;
    }
  }
  // Dig into the ResourceManager and get the AMRMToken, purely for testing.
  UserGroupInformation.setLoginUser(UserGroupInformation
    .createRemoteUser(UserGroupInformation.getCurrentUser().getUserName()));

  // emulate RM setup of AMRM token in credentials by adding the token
  // *before* setting the token service
  UserGroupInformation.getCurrentUser().addToken(appAttempt.getAMRMToken());
  appAttempt.getAMRMToken().setService(ClientRMProxy.getAMRMTokenService(conf));
}
 
Example 14
Project: hadoop   File: TestWebHdfsUrl.java
@Test(timeout=60000)
public void testSecureAuthParamsInUrl() throws IOException {
  Configuration conf = new Configuration();
  // fake turning on security so api thinks it should use tokens
  SecurityUtil.setAuthenticationMethod(KERBEROS, conf);
  UserGroupInformation.setConfiguration(conf);

  UserGroupInformation ugi =
      UserGroupInformation.createRemoteUser("test-user");
  ugi.setAuthenticationMethod(KERBEROS);
  UserGroupInformation.setLoginUser(ugi);

  WebHdfsFileSystem webhdfs = getWebHdfsFileSystem(ugi, conf);
  Path fsPath = new Path("/");
  String tokenString = webhdfs.getDelegationToken().encodeToUrlString();

  // send user
  URL getTokenUrl = webhdfs.toUrl(GetOpParam.Op.GETDELEGATIONTOKEN, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      getTokenUrl);

  // send user
  URL renewTokenUrl = webhdfs.toUrl(PutOpParam.Op.RENEWDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.RENEWDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      renewTokenUrl);

  // send token
  URL cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);
  
  // send token
  URL fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new DelegationParam(tokenString).toString()
      },
      fileStatusUrl);

  // wipe out internal token to simulate auth always required
  webhdfs.setDelegationToken(null);

  // send user
  cancelTokenUrl = webhdfs.toUrl(PutOpParam.Op.CANCELDELEGATIONTOKEN,
      fsPath, new TokenArgumentParam(tokenString));
  checkQueryParams(
      new String[]{
          PutOpParam.Op.CANCELDELEGATIONTOKEN.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString(),
          new TokenArgumentParam(tokenString).toString(),
      },
      cancelTokenUrl);

  // send user
  fileStatusUrl = webhdfs.toUrl(GetOpParam.Op.GETFILESTATUS, fsPath);
  checkQueryParams(
      new String[]{
          GetOpParam.Op.GETFILESTATUS.toQueryString(),
          new UserParam(ugi.getShortUserName()).toString()
      },
      fileStatusUrl);    
}
 
Example 15
Project: hadoop   File: TestWebHDFS.java
@Test(timeout=300000)
public void testLargeDirectory() throws Exception {
  final Configuration conf = WebHdfsTestUtil.createConf();
  final int listLimit = 2;
  // force small chunking of directory listing
  conf.setInt(DFSConfigKeys.DFS_LIST_LIMIT, listLimit);
  // force paths to be only owner-accessible to ensure ugi isn't changing
  // during listStatus
  FsPermission.setUMask(conf, new FsPermission((short)0077));
  
  final MiniDFSCluster cluster =
      new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
  try {
    cluster.waitActive();
    WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsFileSystem.SCHEME)
        .setPermission(new Path("/"),
            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));

    // make the NN treat the client as a non-superuser so we can
    // tell if the correct user is used by listStatus
    UserGroupInformation.setLoginUser(
        UserGroupInformation.createUserForTesting(
            "not-superuser", new String[]{"not-supergroup"}));

    UserGroupInformation.createUserForTesting("me", new String[]{"my-group"})
      .doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws IOException, URISyntaxException {
          FileSystem fs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
              WebHdfsFileSystem.SCHEME);
          Path d = new Path("/my-dir");
          Assert.assertTrue(fs.mkdirs(d));
          for (int i=0; i < listLimit*3; i++) {
            Path p = new Path(d, "file-"+i);
            Assert.assertTrue(fs.createNewFile(p));
          }
          Assert.assertEquals(listLimit*3, fs.listStatus(d).length);
          return null;
        }
      });
  } finally {
    cluster.shutdown();
  }
}