Java Code Examples for org.apache.hadoop.hive.ql.plan.ExprNodeDesc

The following examples show how to use org.apache.hadoop.hive.ql.plan.ExprNodeDesc. These examples are extracted from open source projects; where available, the originating project, source file, and license are noted above each example.
Example 1
private static void fieldEscaper(List<ExprNodeDesc> exprNodes, ExprNodeDesc parent, Set<String> columnNamesInNotInExpression) {
  if (exprNodes == null || exprNodes.isEmpty()) {
    return;
  } else {
    for (ExprNodeDesc nodeDesc : exprNodes) {
      String nodeType = nodeDesc.getTypeString().toLowerCase();
      if (QUOTED_TYPES.contains(nodeType)) {
        PrimitiveTypeInfo tInfo = new PrimitiveTypeInfo();
        tInfo.setTypeName(HIVE_STRING_TYPE_NAME);
        nodeDesc.setTypeInfo(tInfo);
      }
      addColumnNamesOfNotInExpressionToSet(nodeDesc, parent, columnNamesInNotInExpression);
      fieldEscaper(nodeDesc.getChildren(), nodeDesc, columnNamesInNotInExpression);
    }
  }
}
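
Example 1 rewrites quoted types to Hive's string type and, together with Example 10 below, collects the columns that appear in a NOT IN expression. That detection relies on Hive representing col NOT IN (...) as a GenericUDFOPNot node whose only child is a GenericUDFIn node. The following is a minimal, hypothetical sketch of that tree shape using Hive's plan classes; the column name, table alias, and literal values are made up for illustration.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNot;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hypothetical helper, not part of the original example: builds NOT (country IN ('US', 'CA')).
static ExprNodeGenericFuncDesc buildNotInExample() {
  ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "country", "t", false);
  ExprNodeDesc valueA = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "US");
  ExprNodeDesc valueB = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "CA");

  // IN node: the first child is the column, the remaining children are the candidate values.
  ExprNodeGenericFuncDesc inNode = new ExprNodeGenericFuncDesc(
      TypeInfoFactory.booleanTypeInfo, new GenericUDFIn(), Arrays.asList(column, valueA, valueB));

  // NOT node wrapping the IN node: this is the parent/child pair that makes
  // addColumnNamesOfNotInExpressionToSet add inNode.getCols() ("country") to the set.
  return new ExprNodeGenericFuncDesc(
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPNot(),
      Arrays.asList((ExprNodeDesc) inNode));
}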
 
Example 2
Source Project: multiple-dimension-spread   Source File: NullHiveExpr.java    License: Apache License 2.0
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() != 1 ){
    return null;
  }
  ExprNodeDesc columnDesc = nodeDescList.get( 0 );

  if( ! ( columnDesc instanceof ExprNodeColumnDesc ) ){
    return null;
  } 

  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnDesc );
  if( extractNode == null ){
    return null;
  }

  ColumnType targetColumnType = MDSColumnTypeUtil.typeInfoToColumnType( columnDesc.getTypeInfo() );

  return new ExecuterNode( extractNode , new NullFilter( targetColumnType ) );
}
 
Example 3
Source Project: multiple-dimension-spread   Source File: NotNullHiveExpr.java    License: Apache License 2.0
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() != 1 ){
    return null;
  }
  ExprNodeDesc columnDesc = nodeDescList.get( 0 );

  if( ! ( columnDesc instanceof ExprNodeColumnDesc ) ){
    return null;
  } 

  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnDesc ); 
  if( extractNode == null ){
    return null;
  }

  ColumnType targetColumnType = MDSColumnTypeUtil.typeInfoToColumnType( columnDesc.getTypeInfo() );

  return new ExecuterNode( extractNode , new NotNullFilter( targetColumnType ) );
}
 
Example 4
Source Project: multiple-dimension-spread   Source File: InHiveExpr.java    License: Apache License 2.0
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() < 2 ){
    return null;
  }
  ExprNodeDesc columnDesc = nodeDescList.get( 0 );
  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnDesc );
  if( extractNode == null ){
    return null;
  }
  IFilter filter = getEqualsExecuter( nodeDescList , 1 );
  if( filter == null ){
    return null;
  }
  return new ExecuterNode( extractNode , filter );
}
 
Example 5
public DynamoDBFilter getFilter(DynamoDBFilterOperator operator, String columnName, String
    columnType, IndexSearchCondition condition) {
  switch (operator.getType()) {
    case UNARY:
      return getFilter(operator, columnName, columnType);
    case BINARY:
      return getFilter(operator, columnName, columnType, condition.getConstantDesc().getValue()
          .toString());
    case NARY:
      List<ExprNodeDesc> children = ShimsLoader.getHiveShims().getIndexExpression(condition)
          .getChildren();
      String[] values = new String[children.size() - 1];
      // This currently supports IN clause only
      // The first element is column name and rest of the elements are
      // the values it can take
      for (int i = 1; i < children.size(); i++) {
        values[i - 1] = ((ExprNodeConstantDesc) children.get(i)).getValue().toString();
      }
      return getFilter(operator, columnName, columnType, values);
    default:
      throw new RuntimeException("Unknown operator type. Operator: " + operator + " "
          + "OperatorType: " + operator.getType());
  }
}
 
Example 6
public DecomposedPredicate pushPredicate(Map<String, String> hiveTypeMapping, ExprNodeDesc
    predicate) {
  log.info("Checking predicates for pushdown in DynamoDB query");
  List<IndexSearchCondition> searchConditions = getGenericSearchConditions(hiveTypeMapping,
      predicate);
  log.info("Pushed predicates: " + searchConditions);
  if (searchConditions.isEmpty()) {
    return null;
  } else {
    List<IndexSearchCondition> finalSearchCondition =
        prioritizeSearchConditions(searchConditions);
    IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
    DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
    decomposedPredicate.pushedPredicate =
        analyzer.translateSearchConditions(finalSearchCondition);
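    // Keep the full original predicate as the residual so Hive still evaluates it after the
    // scan; the conditions pushed to DynamoDB only reduce the data read, they do not replace it.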
    decomposedPredicate.residualPredicate = (ExprNodeGenericFuncDesc) predicate;
    return decomposedPredicate;
  }
}
 
Example 7
private List<IndexSearchCondition> getGenericSearchConditions(Map<String, String> hiveTypeMapping,
    ExprNodeDesc predicate) {

  IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();

  // DynamoDB does not support filters on columns of types set
  for (Entry<String, String> entry : hiveTypeMapping.entrySet()) {
    if (eligibleHiveTypes.contains(entry.getValue())) {
      analyzer.allowColumnName(entry.getKey());
    }
  }

  for (DynamoDBFilterOperator op : DynamoDBFilterOperator.values()) {
    if (op.getHiveClass() != null) {
      analyzer.addComparisonOp(op.getHiveClass());
    }
  }

  List<IndexSearchCondition> searchConditions = new ArrayList<>();
  analyzer.analyzePredicate(predicate, searchConditions);
  return searchConditions;
}
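
For context, each op.getHiveClass() registered above is the fully qualified class name of a Hive GenericUDF comparison operator; IndexPredicateAnalyzer.addComparisonOp takes that name as a string. A minimal, hypothetical configuration of a standalone analyzer might look like the fragment below (the column name is made up):

import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;

IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
// Allow pushdown only on this column and only for = and > comparisons.
analyzer.allowColumnName("customer_id");
analyzer.addComparisonOp(GenericUDFOPEqual.class.getName());
analyzer.addComparisonOp(GenericUDFOPGreaterThan.class.getName());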
 
Example 8
private DynamoDBQueryFilter getQueryFilter(JobConf conf, Map<String, String>
    hiveDynamoDBMapping, Map<String, String> hiveTypeMapping) throws IOException {
  if (hiveDynamoDBMapping == null) {
    /*
     * Column mapping may be null when user has mapped a DynamoDB item
     * onto a single hive map<string, string> column.
     */
    return new DynamoDBQueryFilter();
  }

  DynamoDBClient client = new DynamoDBClient(conf);
  String filterExprSerialized = conf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
  if (filterExprSerialized == null) {
    return new DynamoDBQueryFilter();
  }
  ExprNodeDesc filterExpr =
      ShimsLoader.getHiveShims().deserializeExpression(filterExprSerialized);

  DynamoDBFilterPushdown pushdown = new DynamoDBFilterPushdown();
  List<KeySchemaElement> schema =
      client.describeTable(conf.get(DynamoDBConstants.TABLE_NAME)).getKeySchema();
  DynamoDBQueryFilter queryFilter = pushdown.predicateToDynamoDBFilter(
      schema, hiveDynamoDBMapping, hiveTypeMapping, filterExpr);
  return queryFilter;
}
 
Example 9
/**
 * @param conf JobConf
 * @param desc predicate expression node.
 * @return DecomposedPredicate containing translated search conditions the analyzer can support.
 */
public DecomposedPredicate decompose(JobConf conf, ExprNodeDesc desc) {
    IndexPredicateAnalyzer analyzer = newAnalyzer(conf);
    List<IndexSearchCondition> sConditions = new ArrayList<IndexSearchCondition>();
    ExprNodeDesc residualPredicate = analyzer.analyzePredicate(desc, sConditions);
    if(sConditions.size() == 0){
        if(log.isInfoEnabled())
            log.info("nothing to decompose. Returning");
        return null;
    }
    DecomposedPredicate decomposedPredicate  = new DecomposedPredicate();
    decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(sConditions);
    decomposedPredicate.residualPredicate = residualPredicate;
    return decomposedPredicate;
}
 
Example 10
private static void addColumnNamesOfNotInExpressionToSet(ExprNodeDesc childNode, ExprNodeDesc parentNode, Set<String> columnsInNotInExpression) {
  if (parentNode != null && childNode != null && parentNode instanceof ExprNodeGenericFuncDesc && childNode instanceof ExprNodeGenericFuncDesc) {
    ExprNodeGenericFuncDesc parentFuncNode = (ExprNodeGenericFuncDesc) parentNode;
    ExprNodeGenericFuncDesc childFuncNode = (ExprNodeGenericFuncDesc) childNode;
    if(parentFuncNode.getGenericUDF() instanceof GenericUDFOPNot && childFuncNode.getGenericUDF() instanceof GenericUDFIn) {
      // The current parent child pair represents a "NOT IN" expression. Add name of the column to the set.
      columnsInNotInExpression.addAll(childFuncNode.getCols());
    }
  }
}
 
Example 11
private ExprBuilder fn(String name, TypeInfo ti, int args) throws Exception {
  List<ExprNodeDesc> children = new ArrayList<>();
  for (int i = 0; i < args; ++i) {
    children.add(stack.pop());
  }
  stack.push(new ExprNodeGenericFuncDesc(ti, FunctionRegistry.getFunctionInfo(name).getGenericUDF(), children));
  return this;
}
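
For context, fn pops args operands off the builder's stack and pushes a single ExprNodeGenericFuncDesc whose GenericUDF is looked up by name in FunctionRegistry. Below is a hedged, standalone equivalent of what fn("=", TypeInfoFactory.booleanTypeInfo, 2) would build for a column and a literal; the column name, table alias, and value are made up, and getFunctionInfo may throw, hence the throws clause.

import java.util.Arrays;
import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hypothetical helper, not part of the original example: builds the expression id = 5.
static ExprNodeGenericFuncDesc buildIdEqualsFive() throws Exception {
  ExprNodeDesc col = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", "t", false);
  ExprNodeDesc lit = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
  // The same construction fn() performs after popping its two operands from the stack.
  return new ExprNodeGenericFuncDesc(
      TypeInfoFactory.booleanTypeInfo,
      FunctionRegistry.getFunctionInfo("=").getGenericUDF(),
      Arrays.asList(col, lit));
}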
 
Example 12
Source Project: multiple-dimension-spread   Source File: HiveExprOrNode.java    License: Apache License 2.0
public HiveExprOrNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc  ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
 
Example 13
Source Project: multiple-dimension-spread   Source File: HiveExprAndNode.java    License: Apache License 2.0
public HiveExprAndNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc  ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
 
Example 14
Source Project: multiple-dimension-spread   Source File: HiveExprNotNode.java    License: Apache License 2.0
public HiveExprNotNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc  ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
 
Example 15
public static IExtractNode getExtractNode(final ExprNodeDesc target ){
  if( target instanceof ExprNodeGenericFuncDesc ){
    return getExtractNodeFromGenericFunc( (ExprNodeGenericFuncDesc)target );
  }
  else if( target instanceof ExprNodeFieldDesc ){
    return getExtractNodeFromField( (ExprNodeFieldDesc)target  );
  }
  else if( target instanceof ExprNodeColumnDesc ){
    if( ( (ExprNodeColumnDesc)target ).getIsPartitionColOrVirtualCol() ){
      return null;
    }
    return getExtractNodeFromColumn( (ExprNodeColumnDesc)target  );
  }
  return null;
}
 
Example 16
Source Project: multiple-dimension-spread   Source File: InHiveExpr.java    License: Apache License 2.0
public static PrimitiveObjectInspector getPrimitiveObjectInspector( final ExprNodeDesc exprNode ){
  if( ! ( exprNode instanceof ExprNodeConstantDesc ) ){
    return null;
  }
  ExprNodeConstantDesc constDesc = (ExprNodeConstantDesc)exprNode;
  ObjectInspector objectInspector = constDesc.getWritableObjectInspector();
  if( objectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  return (PrimitiveObjectInspector)objectInspector;
}
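
As a quick illustration of the check above, a string literal's ExprNodeConstantDesc exposes a writable constant object inspector whose category is PRIMITIVE, so the helper returns it after the cast. A small hypothetical fragment (the literal value is made up):

import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

ExprNodeConstantDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "tokyo");
ObjectInspector inspector = constant.getWritableObjectInspector();
// For a string constant the category is PRIMITIVE (primitive category STRING),
// so getPrimitiveObjectInspector(constant) returns a non-null PrimitiveObjectInspector.
PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector) inspector;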
 
Example 17
Source Project: multiple-dimension-spread   Source File: InHiveExpr.java    License: Apache License 2.0
public static IFilter getEqualsExecuter( final List<ExprNodeDesc> nodeDescList , final int start ){
  PrimitiveObjectInspector rootPrimitiveObjectInspector = getPrimitiveObjectInspector( nodeDescList.get( start ) );
  if( rootPrimitiveObjectInspector == null ){
    return null;
  }
  PrimitiveObjectInspector.PrimitiveCategory primitiveCategory = rootPrimitiveObjectInspector.getPrimitiveCategory();
  IFilter filter = null;
  switch( rootPrimitiveObjectInspector.getPrimitiveCategory() ){
    case STRING:
      Set<String> stringDic = new HashSet<String>();
      for( int i = start ; i < nodeDescList.size() ; i++ ){
        PrimitiveObjectInspector primitiveObjectInspector  = getPrimitiveObjectInspector( nodeDescList.get( i ) );
        if( primitiveObjectInspector == null || primitiveObjectInspector.getPrimitiveCategory() != rootPrimitiveObjectInspector.getPrimitiveCategory() ){
          return null;
        }
        stringDic.add( ( (WritableConstantStringObjectInspector)primitiveObjectInspector ).getWritableConstantValue().toString() );
      }
      return new StringDictionaryFilter( stringDic );
    case BYTE:
    case SHORT:
    case INT:
    case LONG:
    case FLOAT:
    case DOUBLE:
    default:
      return null;
  }
}
 
Example 18
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer,
    ExprNodeDesc predicate) {
  if (jobConf.getBoolean(DynamoDBConstants.DYNAMODB_FILTER_PUSHDOWN, true)) {
    return new DynamoDBFilterPushdown()
        .pushPredicate(HiveDynamoDBUtil.extractHiveTypeMapping(jobConf), predicate);
  } else {
    return null;
  }
}
 
Example 19
public DynamoDBQueryFilter predicateToDynamoDBFilter(List<KeySchemaElement> schema, Map<String,
    String> hiveDynamoDBMapping, Map<String, String> hiveTypeMapping, ExprNodeDesc predicate) {
  List<IndexSearchCondition> searchConditions = getGenericSearchConditions(hiveTypeMapping,
      predicate);

  if (searchConditions.isEmpty()) {
    return null;
  }

  Map<String, DynamoDBFilter> filterMap = new HashMap<>();
  DynamoDBFilterFactory factory = new DynamoDBFilterFactory();

  // The search conditions are supposed to be unique at this point, so not
  // prioritizing them
  for (IndexSearchCondition condition : searchConditions) {
    String hiveColumnName = condition.getColumnDesc().getColumn();
    String dynamoDBColumnName = hiveDynamoDBMapping.get(hiveColumnName);
    DynamoDBFilterOperator op =
        DynamoDBFilterOperator.getFilterOperationFromHiveClass(condition.getComparisonOp());
    DynamoDBFilter filter =
        factory.getFilter(op, dynamoDBColumnName, hiveTypeMapping.get(hiveColumnName), condition);

    if (filterMap.containsKey(dynamoDBColumnName)) {
      // We have special case code for DynamoDB filter BETWEEN because
      // it does not directly map to any Hive predicate
      DynamoDBFilter existingFilter = filterMap.get(dynamoDBColumnName);
      if (isBetweenFilter(op, existingFilter.getOperator())) {
        filterMap.put(dynamoDBColumnName, getBetweenFilter(filter, existingFilter));
      } else {
        throw new RuntimeException("Found two filters for same column: " + dynamoDBColumnName
            + " Filter 1: " + op + " Filter 2: " + existingFilter.getOperator());
      }
    } else {
      filterMap.put(dynamoDBColumnName, filter);
    }
  }

  return getDynamoDBQueryFilter(schema, filterMap);
}
 
Example 20
Source Project: HiveKudu-Handler   Source File: KuduStorageHandler.java    License: Apache License 2.0
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf,
                                              Deserializer deserializer, ExprNodeDesc predicate) {
    // TODO: Implement push down to Kudu here.
    DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
    return decomposedPredicate;
}
 
Example 21
Source Project: parquet-mr   Source File: Hive012Binding.java    License: Apache License 2.0
private void pushFilters(final JobConf jobConf, final TableScanOperator tableScan) {

    final TableScanDesc scanDesc = tableScan.getConf();
    if (scanDesc == null) {
      LOG.debug("Not pushing filters because TableScanDesc is null");
      return;
    }

    // construct column name list for reference by filter push down
    Utilities.setColumnNameList(jobConf, tableScan);

    // push down filters
    final ExprNodeDesc filterExpr = scanDesc.getFilterExpr();
    if (filterExpr == null) {
      LOG.debug("Not pushing filters because FilterExpr is null");
      return;
    }

    final String filterText = filterExpr.getExprString();
    final String filterExprSerialized = Utilities.serializeExpression(filterExpr);
    jobConf.set(
            TableScanDesc.FILTER_TEXT_CONF_STR,
            filterText);
    jobConf.set(
            TableScanDesc.FILTER_EXPR_CONF_STR,
            filterExprSerialized);
  }
 
Example 22
Source Project: parquet-mr   Source File: Hive010Binding.java    License: Apache License 2.0
private void pushFilters(final JobConf jobConf, final TableScanOperator tableScan) {

    final TableScanDesc scanDesc = tableScan.getConf();
    if (scanDesc == null) {
      LOG.debug("Not pushing filters because TableScanDesc is null");
      return;
    }

    // construct column name list for reference by filter push down
    Utilities.setColumnNameList(jobConf, tableScan);

    // push down filters
    final ExprNodeDesc filterExpr = scanDesc.getFilterExpr();
    if (filterExpr == null) {
      LOG.debug("Not pushing filters because FilterExpr is null");
      return;
    }

    final String filterText = filterExpr.getExprString();
    final String filterExprSerialized = Utilities.serializeExpression(filterExpr);
    jobConf.set(
            TableScanDesc.FILTER_TEXT_CONF_STR,
            filterText);
    jobConf.set(
            TableScanDesc.FILTER_EXPR_CONF_STR,
            filterExprSerialized);
  }
 
Example 23
/**
 * @param conf JobConf
 * @return list of IndexSearchConditions from the filter expression.
 */
public List<IndexSearchCondition> getSearchConditions(JobConf conf) {
    List<IndexSearchCondition> sConditions = Lists.newArrayList();
    String filteredExprSerialized = conf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
    if(filteredExprSerialized == null)
        return sConditions;
    ExprNodeDesc filterExpr = Utilities.deserializeExpression(filteredExprSerialized, conf);
    IndexPredicateAnalyzer analyzer = newAnalyzer(conf);
    ExprNodeDesc residual = analyzer.analyzePredicate(filterExpr, sConditions);
    if(residual != null)
        throw new RuntimeException("Unexpected residual predicate: " + residual.getExprString());
    return sConditions;
}
 
Example 24
@Override
public DecomposedPredicate decomposePredicate(JobConf conf,
                                              Deserializer deserializer,
                                              ExprNodeDesc desc) {
    if(conf.get(AccumuloSerde.NO_ITERATOR_PUSHDOWN) == null){
        return predicateHandler.decompose(conf, desc);
    } else {
        log.info("Set to ignore iterator. skipping predicate handler");
        return null;
    }
}
 
Example 25
Source Project: multiple-dimension-spread   Source File: CompareHiveExpr.java    License: Apache License 2.0
public CompareHiveExpr( final List<ExprNodeDesc> nodeDescList , final StringCompareFilterType stringCompareType , final NumberFilterType numberCompareType ){
  this.nodeDescList = nodeDescList;
  this.stringCompareType = stringCompareType;
  this.numberCompareType = numberCompareType;
}
 
Example 26
public NotEqualsHiveExpr( final List<ExprNodeDesc> nodeDescList ){
  super( nodeDescList );
}
 
Example 27
Source Project: multiple-dimension-spread   Source File: BetweenHiveExpr.java    License: Apache License 2.0
public BetweenHiveExpr( final List<ExprNodeDesc> nodeDescList ){
  this.nodeDescList = nodeDescList;
}
 
Example 28
Source Project: multiple-dimension-spread   Source File: BetweenHiveExpr.java    License: Apache License 2.0
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() != 4 ){
    return null;
  }
  ExprNodeDesc constNode1 = nodeDescList.get( 0 );
  ExprNodeDesc columnNode = nodeDescList.get( 1 );
  ExprNodeDesc constNode2 = nodeDescList.get( 2 );
  ExprNodeDesc constNode3 = nodeDescList.get( 3 );

  if( ! ( constNode1 instanceof ExprNodeConstantDesc ) || ! ( constNode2 instanceof ExprNodeConstantDesc ) || ! ( constNode3 instanceof ExprNodeConstantDesc ) ){
    return null;
  } 
  ExprNodeConstantDesc booleanNode = (ExprNodeConstantDesc)constNode1;
  ObjectInspector booleanObjectInspector = booleanNode.getWritableObjectInspector();
  if( booleanObjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector booleanPrimitiveObjectInspector = (PrimitiveObjectInspector)booleanObjectInspector;
  if( booleanPrimitiveObjectInspector.getPrimitiveCategory() != PrimitiveObjectInspector.PrimitiveCategory.BOOLEAN ){
    return null;
  }
  boolean invert = ( (WritableConstantBooleanObjectInspector)booleanPrimitiveObjectInspector ).getWritableConstantValue().get();

  ExprNodeConstantDesc minNode = (ExprNodeConstantDesc)constNode2;
  ExprNodeConstantDesc maxNode = (ExprNodeConstantDesc)constNode3;

  ObjectInspector minObjectInspector = minNode.getWritableObjectInspector();
  ObjectInspector maxObjectInspector = maxNode.getWritableObjectInspector();
  if( minObjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE || maxObjectInspector.getCategory() != ObjectInspector.Category.PRIMITIVE ){
    return null;
  }
  PrimitiveObjectInspector minPrimitiveObjectInspector = (PrimitiveObjectInspector)minObjectInspector;
  PrimitiveObjectInspector maxPrimitiveObjectInspector = (PrimitiveObjectInspector)maxObjectInspector;
  if( minPrimitiveObjectInspector.getPrimitiveCategory() != maxPrimitiveObjectInspector.getPrimitiveCategory() ){
    return null;
  }

  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnNode );
  if( extractNode == null ){
    return null;
  }

  return getRangeExecuter( invert , minPrimitiveObjectInspector , maxPrimitiveObjectInspector , extractNode );
}
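
The fixed positions checked above follow the child ordering of Hive's GenericUDFBetween: index 0 is a boolean invert flag (true for NOT BETWEEN), index 1 is the value expression, and indexes 2 and 3 are the lower and upper bounds. A hedged sketch of building such a node; the column name, table alias, and bounds are made up for illustration.

import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFBetween;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hypothetical helper, not part of the original example: builds age BETWEEN 18 AND 65.
static ExprNodeGenericFuncDesc buildAgeBetweenExample() {
  List<ExprNodeDesc> children = Arrays.<ExprNodeDesc>asList(
      new ExprNodeConstantDesc(TypeInfoFactory.booleanTypeInfo, false),   // invert flag: true means NOT BETWEEN
      new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "age", "t", false),
      new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 18),          // lower bound
      new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 65));         // upper bound
  return new ExprNodeGenericFuncDesc(
      TypeInfoFactory.booleanTypeInfo, new GenericUDFBetween(), children);
}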
 
Example 29
Source Project: multiple-dimension-spread   Source File: EqualsHiveExpr.java    License: Apache License 2.0
public EqualsHiveExpr( final List<ExprNodeDesc> nodeDescList ){
  this.nodeDescList = nodeDescList;
}
 
Example 30
Source Project: multiple-dimension-spread   Source File: NullHiveExpr.java    License: Apache License 2.0
public NullHiveExpr( final List<ExprNodeDesc> nodeDescList ){
  this.nodeDescList = nodeDescList;
}