Java Code Examples for org.apache.hadoop.hive.metastore.api.PrincipalType#USER

The following examples show how to use org.apache.hadoop.hive.metastore.api.PrincipalType#USER. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
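
Before the project examples, here is a minimal standalone sketch (class, owner, and database names are invented) of the two ways PrincipalType.USER typically appears below: as the owner type of a metastore Function and as the type carried by a PrincipalDesc in grant/revoke DDL.

import java.util.Collections;

import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.ql.plan.PrincipalDesc;

public class PrincipalTypeUserSketch {
  public static void main(String[] args) {
    // A metastore function owned by the user "alice" (no resource URIs attached).
    Function fn = new Function("my_udf", "my_db", "com.example.MyUdf", "alice",
        PrincipalType.USER, 0, FunctionType.JAVA, Collections.emptyList());

    // A principal descriptor of type USER, as built by the Sentry task factory examples below.
    PrincipalDesc principal = new PrincipalDesc("alice", PrincipalType.USER);

    System.out.println(fn.getOwnerType() + " / " + principal.getType());
  }
}
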
Example 1
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createShowRoleGrantTask(ASTNode ast, Path resultFile,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  ASTNode child = (ASTNode) ast.getChild(0);
  PrincipalType principalType = PrincipalType.USER;
  switch (child.getType()) {
  case HiveParser.TOK_USER:
    principalType = PrincipalType.USER;
    break;
  case HiveParser.TOK_GROUP:
    principalType = PrincipalType.GROUP;
    break;
  case HiveParser.TOK_ROLE:
    principalType = PrincipalType.ROLE;
    break;
  }
  if (principalType != PrincipalType.GROUP) {
    String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + principalType;
    throw new SemanticException(msg);
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
  RoleDDLDesc roleDesc = new RoleDDLDesc(principalName, principalType,
      RoleDDLDesc.RoleOperation.SHOW_ROLE_GRANT, null);
  roleDesc.setResFile(resultFile.toString());
  return createTask(new DDLWork(inputs, outputs, roleDesc));
}
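
Note that PrincipalType.USER only serves as the pre-switch default here: Sentry manages role membership at the group level, so any principal other than GROUP is rejected before the RoleDDLDesc is built.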
 
Example 2
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
private Task<? extends Serializable> analyzeGrantRevokeRole(boolean isGrant, ASTNode ast,
    HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
      (ASTNode) ast.getChild(0));

  List<String> roles = new ArrayList<String>();
  for (int i = 1; i < ast.getChildCount(); i++) {
    roles.add(BaseSemanticAnalyzer.unescapeIdentifier(ast.getChild(i).getText()));
  }
  String roleOwnerName = "";
  if (SessionState.get() != null
      && SessionState.get().getAuthenticator() != null) {
    roleOwnerName = SessionState.get().getAuthenticator().getUserName();
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.GROUP) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_ON_OBJECT + princ.getType();
      throw new SemanticException(msg);
    }
  }
  GrantRevokeRoleDDL grantRevokeRoleDDL = new GrantRevokeRoleDDL(isGrant,
      roles, principalDesc, roleOwnerName, PrincipalType.USER, false);
  return createTask(new DDLWork(inputs, outputs, grantRevokeRoleDDL));
}
 
Example 3
Source File: CatalogToHiveConverter.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
public static PrincipalType convertPrincipalType(com.amazonaws.services.glue.model.PrincipalType catalogPrincipalType) {
  if(catalogPrincipalType == null) {
    return null;
  }
  
  if(catalogPrincipalType == com.amazonaws.services.glue.model.PrincipalType.GROUP) {
    return PrincipalType.GROUP;
  } else if(catalogPrincipalType == com.amazonaws.services.glue.model.PrincipalType.USER) {
    return PrincipalType.USER;
  } else if(catalogPrincipalType == com.amazonaws.services.glue.model.PrincipalType.ROLE) {
    return PrincipalType.ROLE;
  }
  throw new RuntimeException("Unknown principal type:" + catalogPrincipalType.name());
}
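
A short, hypothetical call sketch for the converter above (the input value is arbitrary):

// Map an AWS Glue principal type onto the Hive metastore enum; a null input passes through as null.
com.amazonaws.services.glue.model.PrincipalType glueType =
    com.amazonaws.services.glue.model.PrincipalType.USER;
PrincipalType hiveType = CatalogToHiveConverter.convertPrincipalType(glueType); // PrincipalType.USER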
 
Example 4
Source File: GlueMetastoreClientDelegate.java    From aws-glue-data-catalog-client-for-apache-hive-metastore with Apache License 2.0
public List<org.apache.hadoop.hive.metastore.api.Role> listRoles(
    String principalName,
    org.apache.hadoop.hive.metastore.api.PrincipalType principalType
) throws TException {
  // All users belong to public role implicitly, add that role
  // Bring logic from Hive's ObjectStore
  // https://code.amazon.com/packages/Aws157Hive/blobs/48f6e30080df475ffe54c39f70dd134268e30358/
  // --/metastore/src/java/org/apache/hadoop/hive/metastore/ObjectStore.java#L4208
  if (principalType == PrincipalType.USER) {
    return implicitRoles;
  } else {
    throw new UnsupportedOperationException(
        "listRoles is only supported for " + PrincipalType.USER + " Principal type");
  }
}
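
A hedged call sketch, assuming delegate is an already-constructed GlueMetastoreClientDelegate (TException must be handled or declared by the caller):

// USER principals receive the implicit roles (every user implicitly belongs to the public role).
List<org.apache.hadoop.hive.metastore.api.Role> roles =
    delegate.listRoles("alice", PrincipalType.USER);

// Any other principal type is rejected:
// delegate.listRoles("admins", PrincipalType.GROUP); // throws UnsupportedOperationException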
 
Example 5
Source File: WaggleDanceIntegrationTest.java    From waggle-dance with Apache License 2.0
@Test
public void typicalGetAllFunctions() throws Exception {
  runner = WaggleDanceRunner
      .builder(configLocation)
      .databaseResolution(DatabaseResolution.PREFIXED)
      .primary("primary", localServer.getThriftConnectionUri(), READ_ONLY)
      .withPrimaryPrefix("primary_")
      .federate(SECONDARY_METASTORE_NAME, remoteServer.getThriftConnectionUri(), REMOTE_DATABASE)
      .build();

  runWaggleDance(runner);
  HiveMetaStoreClient proxy = getWaggleDanceClient();
  List<ResourceUri> resourceUris = Lists
      .newArrayList(new ResourceUri(ResourceType.JAR, "hdfs://path/to/my/jar/my.jar"));
  Function localFunction = new Function("fn1", LOCAL_DATABASE, "com.hotels.hive.FN1", "hadoop", PrincipalType.USER, 0,
      FunctionType.JAVA, resourceUris);
  localServer.client().createFunction(localFunction);
  Function remoteFunction = new Function("fn2", REMOTE_DATABASE, "com.hotels.hive.FN1", "hadoop", PrincipalType.USER,
      0, FunctionType.JAVA, resourceUris);
  remoteServer.client().createFunction(remoteFunction);

  GetAllFunctionsResponse allFunctions = proxy.getAllFunctions();
  List<Function> functions = allFunctions.getFunctions();
  assertThat(functions.size(), is(3));
  assertThat(functions.get(0).getFunctionName(), is("fn1"));
  assertThat(functions.get(0).getDbName(), is("primary_" + LOCAL_DATABASE));
  assertThat(functions.get(1).getFunctionName(), is("fn1"));
  assertThat(functions.get(1).getDbName(), is(LOCAL_DATABASE));
  assertThat(functions.get(2).getFunctionName(), is("fn2"));
  assertThat(functions.get(2).getDbName(), is(PREFIXED_REMOTE_DATABASE));
}
 
Example 6
Source File: HiveStubs.java    From waggle-dance with Apache License 2.0
public static Function newFunction(String databaseName, String functionName) {
  List<ResourceUri> resourceUris = Lists
      .newArrayList(new ResourceUri(ResourceType.JAR, "hdfs://path/to/my/jar/my.jar"));
  Function function = new Function(functionName, databaseName, "com.hotels.hive.FN", "hadoop", PrincipalType.USER, 0,
      FunctionType.JAVA, resourceUris);
  return function;
}
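
A brief usage sketch, assuming a connected HiveMetaStoreClient named client (database and function names are arbitrary):

// Build a stub JAVA function owned by "hadoop" as a USER principal, then register it in the metastore.
Function function = HiveStubs.newFunction("test_db", "fn1");
client.createFunction(function);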
 
Example 7
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createGrantTask(ASTNode ast, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  List<PrivilegeDesc> privilegeDesc = analyzePrivilegeListDef(
      (ASTNode) ast.getChild(0));
  List<PrincipalDesc> principalDesc = analyzePrincipalListDef(
      (ASTNode) ast.getChild(1));
  SentryHivePrivilegeObjectDesc privilegeObj = null;
  boolean grantOption = false;
  if (ast.getChildCount() > 2) {
    for (int i = 2; i < ast.getChildCount(); i++) {
      ASTNode astChild = (ASTNode) ast.getChild(i);
      if (astChild.getType() == HiveParser.TOK_GRANT_WITH_OPTION) {
        grantOption = true;
      } else if (astChild.getType() == HiveParser.TOK_PRIV_OBJECT) {
        privilegeObj = analyzePrivilegeObject(astChild);
      }
    }
  }
  String userName = null;
  if (SessionState.get() != null
      && SessionState.get().getAuthenticator() != null) {
    userName = SessionState.get().getAuthenticator().getUserName();
  }
  Preconditions.checkNotNull(privilegeObj, "privilegeObj is null for " + ast.dump());
  if (privilegeObj.getPartSpec() != null) {
    throw new SemanticException(SentryHiveConstants.PARTITION_PRIVS_NOT_SUPPORTED);
  }
  for (PrincipalDesc princ : principalDesc) {
    if (princ.getType() != PrincipalType.ROLE) {
      String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + princ.getType();
      throw new SemanticException(msg);
    }
  }
  GrantDesc grantDesc = new GrantDesc(privilegeObj, privilegeDesc,
      principalDesc, userName, PrincipalType.USER, grantOption);
  return createTask(new DDLWork(inputs, outputs, grantDesc));
}
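
In this method the grantee principals must be ROLEs; PrincipalType.USER appears only as the grantor type, recording the current session user (taken from the SessionState authenticator) as the one issuing the grant.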
 
Example 8
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
@Override
public Task<? extends Serializable> createShowGrantTask(ASTNode ast, Path resultFile, HashSet<ReadEntity> inputs,
    HashSet<WriteEntity> outputs) throws SemanticException {
  SentryHivePrivilegeObjectDesc privHiveObj = null;

  ASTNode principal = (ASTNode) ast.getChild(0);
  PrincipalType type = PrincipalType.USER;
  switch (principal.getType()) {
  case HiveParser.TOK_USER:
    type = PrincipalType.USER;
    break;
  case HiveParser.TOK_GROUP:
    type = PrincipalType.GROUP;
    break;
  case HiveParser.TOK_ROLE:
    type = PrincipalType.ROLE;
    break;
  }
  if (type != PrincipalType.ROLE) {
    String msg = SentryHiveConstants.GRANT_REVOKE_NOT_SUPPORTED_FOR_PRINCIPAL + type;
    throw new SemanticException(msg);
  }
  String principalName = BaseSemanticAnalyzer.unescapeIdentifier(principal.getChild(0).getText());
  PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);

  // Partition privileges are not supported by Sentry
  if (ast.getChildCount() > 1) {
    ASTNode child = (ASTNode) ast.getChild(1);
    if (child.getToken().getType() == HiveParser.TOK_PRIV_OBJECT_COL) {
      privHiveObj = analyzePrivilegeObject(child);
    } else {
      throw new SemanticException("Unrecognized Token: " + child.getToken().getType());
    }
  }

  ShowGrantDesc showGrant = new ShowGrantDesc(resultFile.toString(),
      principalDesc, privHiveObj);
  return createTask(new DDLWork(inputs, outputs, showGrant));
}
 
Example 9
Source File: SentryHiveAuthorizationTaskFactoryImpl.java    From incubator-sentry with Apache License 2.0
private List<PrincipalDesc> analyzePrincipalListDef(ASTNode node) {
  List<PrincipalDesc> principalList = new ArrayList<PrincipalDesc>();
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    PrincipalType type = null;
    switch (child.getType()) {
    // The literal token ids below (880, 685, 782) appear to be HiveParser token
    // values from another parser version, mirroring the symbolic TOK_* cases.
    case 880:
      type = PrincipalType.USER;
      break;
    case HiveParser.TOK_USER:
      type = PrincipalType.USER;
      break;
    case 685:
      type = PrincipalType.GROUP;
      break;
    case HiveParser.TOK_GROUP:
      type = PrincipalType.GROUP;
      break;
    case 782:
      type = PrincipalType.ROLE;
      break;
    case HiveParser.TOK_ROLE:
      type = PrincipalType.ROLE;
      break;
    }
    String principalName = BaseSemanticAnalyzer.unescapeIdentifier(child.getChild(0).getText());
    PrincipalDesc principalDesc = new PrincipalDesc(principalName, type);
    LOG.debug("## Principal : [ " + principalName + ", " + type + "]");
    principalList.add(principalDesc);
  }
  return principalList;
}