Java Code Examples for org.apache.calcite.rel.core.Filter#getCondition()

The following examples show how to use org.apache.calcite.rel.core.Filter#getCondition(). They are taken from open source projects; each example notes the original project and source file it comes from.
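In every example below, Filter#getCondition() returns the RexNode predicate held by the Filter, which the rule then rewrites and feeds back into a rebuilt Filter via RelBuilder. Before the project-specific examples, here is a minimal sketch of that common shape; it is not taken from any of the projects below, the rule class name is illustrative, and RexSimplify stands in for whatever rewrite a real rule would apply.

// Minimal sketch (illustrative, not from the projects below): read the Filter's
// condition, rewrite it (here via RexSimplify as a placeholder rewrite),
// and rebuild the Filter on top of the same input.
import org.apache.calcite.plan.RelOptPredicateList;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.plan.RelOptRuleCall;
import org.apache.calcite.rel.core.Filter;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexSimplify;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.tools.RelBuilder;

public class SimplifyFilterConditionRule extends RelOptRule {
  public SimplifyFilterConditionRule() {
    super(operand(Filter.class, any()), "SimplifyFilterConditionRule");
  }

  @Override public void onMatch(RelOptRuleCall call) {
    final Filter filter = call.rel(0);
    final RexNode condition = filter.getCondition();   // the Filter's predicate
    final RexSimplify simplify =
        new RexSimplify(filter.getCluster().getRexBuilder(),
            RelOptPredicateList.EMPTY, RexUtil.EXECUTOR);
    final RexNode simplified = simplify.simplify(condition);
    if (simplified.toString().equals(condition.toString())) {
      return;                                          // nothing changed; avoid re-firing
    }
    final RelBuilder relBuilder = call.builder();
    call.transformTo(
        relBuilder.push(filter.getInput()).filter(simplified).build());
  }
}
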
Example 1
Source File: FilterRemoveIsNotDistinctFromRule.java    From Bats with Apache License 2.0
@Override
public void onMatch(RelOptRuleCall call) {
    Filter oldFilter = call.rel(0);
    RexNode oldFilterCond = oldFilter.getCondition();

    if (RexUtil.findOperatorCall(SqlStdOperatorTable.IS_NOT_DISTINCT_FROM, oldFilterCond) == null) {
        // no longer contains isNotDistinctFromOperator
        return;
    }

    // Now replace all the "a isNotDistinctFrom b"
    // with the RexNode given by RelOptUtil.isDistinctFrom() method

    RemoveIsNotDistinctFromRexShuttle rewriteShuttle = new RemoveIsNotDistinctFromRexShuttle(
            oldFilter.getCluster().getRexBuilder());

    final RelBuilder relBuilder = call.builder();
    final RelNode newFilterRel = relBuilder.push(oldFilter.getInput()).filter(oldFilterCond.accept(rewriteShuttle))
            .build();

    call.transformTo(newFilterRel);
}
 
Example 2
Source File: DruidRules.java    From calcite with Apache License 2.0
@Override public void onMatch(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final DruidQuery query = call.rel(1);

  if (!DruidQuery.isValidSignature(query.signature() + 'h')) {
    return;
  }

  final RexNode cond = filter.getCondition();
  final DruidJsonFilter druidJsonFilter = DruidJsonFilter
      .toDruidFilters(cond, query.getTopNode().getRowType(), query);
  if (druidJsonFilter != null) {
    final RelNode newFilter = filter
        .copy(filter.getTraitSet(), Util.last(query.rels), filter.getCondition());
    final DruidQuery newDruidQuery = DruidQuery.extendQuery(query, newFilter);
    call.transformTo(newDruidQuery);
  }
}
 
Example 3
Source File: RelMdAllPredicates.java    From Bats with Apache License 2.0
/**
 * Add the Filter condition to the list obtained from the input.
 */
public RelOptPredicateList getAllPredicates(Filter filter, RelMetadataQuery mq) {
  final RelNode input = filter.getInput();
  final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
  final RexNode pred = filter.getCondition();

  final RelOptPredicateList predsBelow = mq.getAllPredicates(input);
  if (predsBelow == null) {
    // Safety check
    return null;
  }

  // Extract input fields referenced by Filter condition
  final Set<RelDataTypeField> inputExtraFields = new LinkedHashSet<>();
  final RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(inputExtraFields);
  pred.accept(inputFinder);
  final ImmutableBitSet inputFieldsUsed = inputFinder.inputBitSet.build();

  // Infer column origin expressions for given references
  final Map<RexInputRef, Set<RexNode>> mapping = new LinkedHashMap<>();
  for (int idx : inputFieldsUsed) {
    final RexInputRef ref = RexInputRef.of(idx, filter.getRowType().getFieldList());
    final Set<RexNode> originalExprs = mq.getExpressionLineage(filter, ref);
    if (originalExprs == null) {
      // Bail out
      return null;
    }
    mapping.put(ref, originalExprs);
  }

  // Replace with new expressions and return union of predicates
  final Set<RexNode> allExprs =
      RelMdExpressionLineage.createAllPossibleExpressions(rexBuilder, pred, mapping);
  if (allExprs == null) {
    return null;
  }
  return predsBelow.union(rexBuilder, RelOptPredicateList.of(rexBuilder, allExprs));
}
 
Example 4
Source File: FilterSetOpTransposeRule.java    From Bats with Apache License 2.0
public void onMatch(RelOptRuleCall call) {
  Filter filterRel = call.rel(0);
  SetOp setOp = call.rel(1);

  RexNode condition = filterRel.getCondition();

  // create filters on top of each setop child, modifying the filter
  // condition to reference each setop child
  RexBuilder rexBuilder = filterRel.getCluster().getRexBuilder();
  final RelBuilder relBuilder = call.builder();
  List<RelDataTypeField> origFields =
      setOp.getRowType().getFieldList();
  int[] adjustments = new int[origFields.size()];
  final List<RelNode> newSetOpInputs = new ArrayList<>();
  for (RelNode input : setOp.getInputs()) {
    RexNode newCondition =
        condition.accept(
            new RelOptUtil.RexInputConverter(
                rexBuilder,
                origFields,
                input.getRowType().getFieldList(),
                adjustments));
    newSetOpInputs.add(relBuilder.push(input).filter(newCondition).build());
  }

  // create a new setop whose children are the filters created above
  SetOp newSetOp =
      setOp.copy(setOp.getTraitSet(), newSetOpInputs);

  call.transformTo(newSetOp);
}
 
Example 5
Source File: RelFieldTrimmer.java    From Bats with Apache License 2.0
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalFilter}.
 */
public TrimResult trimFields(Filter filter, ImmutableBitSet fieldsUsed, Set<RelDataTypeField> extraFields) {
    final RelDataType rowType = filter.getRowType();
    final int fieldCount = rowType.getFieldCount();
    final RexNode conditionExpr = filter.getCondition();
    final RelNode input = filter.getInput();

    // We use the fields used by the consumer, plus any fields used in the
    // filter.
    final Set<RelDataTypeField> inputExtraFields = new LinkedHashSet<>(extraFields);
    RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(inputExtraFields);
    inputFinder.inputBitSet.addAll(fieldsUsed);
    conditionExpr.accept(inputFinder);
    final ImmutableBitSet inputFieldsUsed = inputFinder.inputBitSet.build();

    // Create input with trimmed columns.
    TrimResult trimResult = trimChild(filter, input, inputFieldsUsed, inputExtraFields);
    RelNode newInput = trimResult.left;
    final Mapping inputMapping = trimResult.right;

    // If the input is unchanged, and we need to project all columns,
    // there's nothing we can do.
    if (newInput == input && fieldsUsed.cardinality() == fieldCount) {
        return result(filter, Mappings.createIdentity(fieldCount));
    }

    // Build new project expressions, and populate the mapping.
    final RexVisitor<RexNode> shuttle = new RexPermuteInputsShuttle(inputMapping, newInput);
    RexNode newConditionExpr = conditionExpr.accept(shuttle);

    // Use copy rather than relBuilder so that correlating variables get set.
    relBuilder.push(filter.copy(filter.getTraitSet(), newInput, newConditionExpr));

    // The result has the same mapping as the input gave us. Sometimes we
    // return fields that the consumer didn't ask for, because the filter
    // needs them for its condition.
    return result(relBuilder.build(), inputMapping);
}
 
Example 6
Source File: DruidQuery.java    From calcite with Apache License 2.0
/**
 * Translates a Filter rel to a Druid filter JSON object, if possible.
 * Currently the Filter rel input has to be a Druid table scan.
 *
 * @param filterRel input filter rel
 * @param druidQuery Druid query
 *
 * @return Druid JSON filter, or null if one of the filters cannot be translated
 */
@Nullable
private static DruidJsonFilter computeFilter(@Nullable Filter filterRel,
    DruidQuery druidQuery) {
  if (filterRel == null) {
    return null;
  }
  final RexNode filter = filterRel.getCondition();
  final RelDataType inputRowType = filterRel.getInput().getRowType();
  if (filter != null) {
    return DruidJsonFilter.toDruidFilters(filter, inputRowType, druidQuery);
  }
  return null;
}
 
Example 7
Source File: RelMdAllPredicates.java    From calcite with Apache License 2.0
/**
 * Add the Filter condition to the list obtained from the input.
 */
public RelOptPredicateList getAllPredicates(Filter filter, RelMetadataQuery mq) {
  final RelNode input = filter.getInput();
  final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
  final RexNode pred = filter.getCondition();

  final RelOptPredicateList predsBelow = mq.getAllPredicates(input);
  if (predsBelow == null) {
    // Safety check
    return null;
  }

  // Extract input fields referenced by Filter condition
  final Set<RelDataTypeField> inputExtraFields = new LinkedHashSet<>();
  final RelOptUtil.InputFinder inputFinder = new RelOptUtil.InputFinder(inputExtraFields);
  pred.accept(inputFinder);
  final ImmutableBitSet inputFieldsUsed = inputFinder.build();

  // Infer column origin expressions for given references
  final Map<RexInputRef, Set<RexNode>> mapping = new LinkedHashMap<>();
  for (int idx : inputFieldsUsed) {
    final RexInputRef ref = RexInputRef.of(idx, filter.getRowType().getFieldList());
    final Set<RexNode> originalExprs = mq.getExpressionLineage(filter, ref);
    if (originalExprs == null) {
      // Bail out
      return null;
    }
    mapping.put(ref, originalExprs);
  }

  // Replace with new expressions and return union of predicates
  final Set<RexNode> allExprs =
      RelMdExpressionLineage.createAllPossibleExpressions(rexBuilder, pred, mapping);
  if (allExprs == null) {
    return null;
  }
  return predsBelow.union(rexBuilder, RelOptPredicateList.of(rexBuilder, allExprs));
}
 
Example 8
Source File: FilterRemoveIsNotDistinctFromRule.java    From calcite with Apache License 2.0
public void onMatch(RelOptRuleCall call) {
  Filter oldFilter = call.rel(0);
  RexNode oldFilterCond = oldFilter.getCondition();

  if (RexUtil.findOperatorCall(
      SqlStdOperatorTable.IS_NOT_DISTINCT_FROM,
      oldFilterCond)
      == null) {
    // no longer contains isNotDistinctFromOperator
    return;
  }

  // Now replace all the "a isNotDistinctFrom b"
  // with the RexNode given by RelOptUtil.isDistinctFrom() method

  RemoveIsNotDistinctFromRexShuttle rewriteShuttle =
      new RemoveIsNotDistinctFromRexShuttle(
          oldFilter.getCluster().getRexBuilder());

  final RelBuilder relBuilder = call.builder();
  final RelNode newFilterRel = relBuilder
      .push(oldFilter.getInput())
      .filter(oldFilterCond.accept(rewriteShuttle))
      .build();

  call.transformTo(newFilterRel);
}
 
Example 9
Source File: FilterSetOpTransposeRule.java    From calcite with Apache License 2.0
public void onMatch(RelOptRuleCall call) {
  Filter filterRel = call.rel(0);
  SetOp setOp = call.rel(1);

  RexNode condition = filterRel.getCondition();

  // create filters on top of each setop child, modifying the filter
  // condition to reference each setop child
  RexBuilder rexBuilder = filterRel.getCluster().getRexBuilder();
  final RelBuilder relBuilder = call.builder();
  List<RelDataTypeField> origFields =
      setOp.getRowType().getFieldList();
  int[] adjustments = new int[origFields.size()];
  final List<RelNode> newSetOpInputs = new ArrayList<>();
  for (RelNode input : setOp.getInputs()) {
    RexNode newCondition =
        condition.accept(
            new RelOptUtil.RexInputConverter(
                rexBuilder,
                origFields,
                input.getRowType().getFieldList(),
                adjustments));
    newSetOpInputs.add(relBuilder.push(input).filter(newCondition).build());
  }

  // create a new setop whose children are the filters created above
  SetOp newSetOp =
      setOp.copy(setOp.getTraitSet(), newSetOpInputs);

  call.transformTo(newSetOp);
}
 
Example 10
Source File: JdbcRules.java    From calcite with Apache License 2.0
public RelNode convert(RelNode rel) {
  final Filter filter = (Filter) rel;

  return new JdbcFilter(
      rel.getCluster(),
      rel.getTraitSet().replace(out),
      convert(filter.getInput(),
          filter.getInput().getTraitSet().replace(out)),
      filter.getCondition());
}
 
Example 11
Source File: ORCFilterPushDownRule.java    From dremio-oss with Apache License 2.0
@Override
public void onMatch(RelOptRuleCall call) {
  final Filter filter = call.rel(0);
  final HiveScanDrel scan = call.rel(1);
  final RexBuilder rexBuilder = filter.getCluster().getRexBuilder();
  final RexNode originalFilter = filter.getCondition();

  try {
    RexNode filterThatCanBePushed = originalFilter.accept(new ORCFindRelevantFilters(rexBuilder));
    if (filterThatCanBePushed == null) {
      return;
    }

    // Convert the filter expression that is just an input ref on bool column into a function call.
    // SearchArgumentGenerator can only work on filter expressions where root is a function call.
    filterThatCanBePushed =
        ORCFindRelevantFilters.convertBooleanInputRefToFunctionCall(rexBuilder, filterThatCanBePushed);

    final HiveTableXattr tableXattr =
      HiveTableXattr.parseFrom(scan.getTableMetadata().getReadDefinition().getExtendedProperty().asReadOnlyByteBuffer());
    final List<HiveReaderProto.ColumnInfo> columnInfos = tableXattr.getColumnInfoList();
    List<HiveReaderProto.ColumnInfo> selectedColumnInfos = new ArrayList<>();
    final List<String> columnNames = scan.getRowType().getFieldNames();
    final Set<String> columnNameSet = columnNames.stream().map(String::toUpperCase).collect(Collectors.toSet());
    final BatchSchema scanTableSchema = scan.getTableMetadata().getSchema();

    // columnInfos contains hive data type info
    // scanTableSchema is table BatchSchema
    // columnNames are selected / projected column names
    // Here we prepare column info that contains hive data type information for selected columns
    // Iterate over all fields of table schema, and if it is in projected columnNames list,
    // then add ColumnInfo to selectedColumnInfos
    if (columnInfos.size() == scanTableSchema.getFieldCount()) {
      for (int fieldPos = 0; fieldPos < scanTableSchema.getFieldCount(); ++fieldPos) {
        if (columnNameSet.contains(scanTableSchema.getColumn(fieldPos).getName().toUpperCase())) {
          selectedColumnInfos.add(columnInfos.get(fieldPos));
        }
      }
    }

    final ORCSearchArgumentGenerator sargGenerator = new ORCSearchArgumentGenerator(columnNames, selectedColumnInfos);
    filterThatCanBePushed.accept(sargGenerator);
    final SearchArgument sarg = sargGenerator.get();

    final HiveProxiedOrcScanFilter proxiedOrcScanFilter = new ORCScanFilter(sarg, pluginId);
    final HiveProxyingOrcScanFilter proxyingOrcScanFilter = new HiveProxyingOrcScanFilter(pluginId.getName(), proxiedOrcScanFilter);

    final RelNode newScan = scan.applyFilter(proxyingOrcScanFilter);

    // We still need the original filter in the Filter operator, as ORC filtering is based only on stripe stats
    // and the ORC reader could return values that don't satisfy the filter.
    call.transformTo(filter.copy(filter.getTraitSet(), newScan, originalFilter));
  } catch (Exception e) {
    logger.warn("Failed to push filter into ORC reader", e);
    // ignore the exception and continue with planning
  }
}
 
Example 12
Source File: ProjectFilterTransposeRule.java    From calcite with Apache License 2.0
public void onMatch(RelOptRuleCall call) {
  final Project origProject;
  final Filter filter;
  if (call.rels.length >= 2) {
    origProject = call.rel(0);
    filter = call.rel(1);
  } else {
    origProject = null;
    filter = call.rel(0);
  }
  final RelNode input = filter.getInput();
  final RexNode origFilter = filter.getCondition();

  if ((origProject != null)
      && RexOver.containsOver(origProject.getProjects(), null)) {
    // Cannot push project through filter if project contains a windowed
    // aggregate -- it will affect row counts. Abort this rule
    // invocation; pushdown will be considered after the windowed
    // aggregate has been implemented. It's OK if the filter contains a
    // windowed aggregate.
    return;
  }

  if ((origProject != null)
      && origProject.getRowType().isStruct()
      && origProject.getRowType().getFieldList().stream()
        .anyMatch(RelDataTypeField::isDynamicStar)) {
    // The PushProjector would change the plan:
    //
    //    prj(**=[$0])
    //    : - filter
    //        : - scan
    //
    // to a form like:
    //
    //    prj(**=[$0])                    (1)
    //    : - filter                      (2)
    //        : - prj(**=[$0], ITEM= ...) (3)
    //            :  - scan
    // This new plan has a higher cost than the old one, because of the new
    // redundant project (3). If we also have FilterProjectTransposeRule in
    // the rule set, it will also trigger an infinite match of ProjectMergeRule
    // for projects (1) and (3).
    return;
  }

  final RelBuilder builder = call.builder();
  final RelNode topProject;
  if (origProject != null && (wholeProject || wholeFilter)) {
    builder.push(input);

    final Set<RexNode> set = new LinkedHashSet<>();
    final RelOptUtil.InputFinder refCollector = new RelOptUtil.InputFinder();

    if (wholeFilter) {
      set.add(filter.getCondition());
    } else {
      filter.getCondition().accept(refCollector);
    }
    if (wholeProject) {
      set.addAll(origProject.getProjects());
    } else {
      refCollector.visitEach(origProject.getProjects());
    }

    // Build a list with inputRefs, in order, first, then other expressions.
    final List<RexNode> list = new ArrayList<>();
    final ImmutableBitSet refs = refCollector.build();
    for (RexNode field : builder.fields()) {
      if (refs.get(((RexInputRef) field).getIndex()) || set.contains(field)) {
        list.add(field);
      }
    }
    set.removeAll(list);
    list.addAll(set);
    builder.project(list);
    final Replacer replacer = new Replacer(list, builder);
    builder.filter(replacer.visit(filter.getCondition()));
    builder.project(replacer.visitList(origProject.getProjects()),
        origProject.getRowType().getFieldNames());
    topProject = builder.build();
  } else {
    // The traditional mode of operation of this rule: push down field
    // references. The effect is similar to RelFieldTrimmer.
    final PushProjector pushProjector =
        new PushProjector(origProject, origFilter, input,
            preserveExprCondition, builder);
    topProject = pushProjector.convertProject(null);
  }

  if (topProject != null) {
    call.transformTo(topProject);
  }
}
 
Example 13
Source File: RelFieldTrimmer.java    From calcite with Apache License 2.0
/**
 * Variant of {@link #trimFields(RelNode, ImmutableBitSet, Set)} for
 * {@link org.apache.calcite.rel.logical.LogicalFilter}.
 */
public TrimResult trimFields(
    Filter filter,
    ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields) {
  final RelDataType rowType = filter.getRowType();
  final int fieldCount = rowType.getFieldCount();
  final RexNode conditionExpr = filter.getCondition();
  final RelNode input = filter.getInput();

  // We use the fields used by the consumer, plus any fields used in the
  // filter.
  final Set<RelDataTypeField> inputExtraFields =
      new LinkedHashSet<>(extraFields);
  RelOptUtil.InputFinder inputFinder =
      new RelOptUtil.InputFinder(inputExtraFields, fieldsUsed);
  conditionExpr.accept(inputFinder);
  final ImmutableBitSet inputFieldsUsed = inputFinder.build();

  // Create input with trimmed columns.
  TrimResult trimResult =
      trimChild(filter, input, inputFieldsUsed, inputExtraFields);
  RelNode newInput = trimResult.left;
  final Mapping inputMapping = trimResult.right;

  // If the input is unchanged, and we need to project all columns,
  // there's nothing we can do.
  if (newInput == input
      && fieldsUsed.cardinality() == fieldCount) {
    return result(filter, Mappings.createIdentity(fieldCount));
  }

  // Build new project expressions, and populate the mapping.
  final RexVisitor<RexNode> shuttle =
      new RexPermuteInputsShuttle(inputMapping, newInput);
  RexNode newConditionExpr =
      conditionExpr.accept(shuttle);

  // Build new filter with trimmed input and condition.
  relBuilder.push(newInput)
      .filter(filter.getVariablesSet(), newConditionExpr);

  // The result has the same mapping as the input gave us. Sometimes we
  // return fields that the consumer didn't ask for, because the filter
  // needs them for its condition.
  return result(relBuilder.build(), inputMapping);
}