Java Code Examples for org.apache.kylin.metadata.filter.TupleFilter#collectColumns()

The following examples show how to use org.apache.kylin.metadata.filter.TupleFilter#collectColumns(). They are taken from open-source projects; the source file and originating project are noted above each example.
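All of the examples below share one pattern: build a TupleFilter tree, then pass it together with a mutable Set<TblColRef> to the static helper, which walks the tree and accumulates every referenced column. Here is a minimal sketch of that pattern; the wrapper class CollectColumnsSketch and the null guard are illustrative additions, not part of Kylin.

import java.util.HashSet;
import java.util.Set;

import org.apache.kylin.metadata.filter.TupleFilter;
import org.apache.kylin.metadata.model.TblColRef;

public class CollectColumnsSketch {

    // Walk the filter tree and return every column it references.
    // collectColumns() adds each TblColRef it encounters to the supplied set.
    static Set<TblColRef> columnsOf(TupleFilter filter) {
        Set<TblColRef> columns = new HashSet<>();
        if (filter != null) { // illustrative guard; the Kylin call sites pass non-null filters
            TupleFilter.collectColumns(filter, columns);
        }
        return columns;
    }
}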
Example 1
Source File: OLAPFilterRel.java    From kylin-on-parquet-v2 with Apache License 2.0
void translateFilter(OLAPContext context) {
    if (this.condition == null) {
        return;
    }

    TupleFilterVisitor visitor = new TupleFilterVisitor(this.columnRowType);
    boolean isRealtimeTable = columnRowType.getColumnByIndex(0).getColumnDesc().getTable().isStreamingTable();
    autoJustTimezone = isRealtimeTable && autoJustTimezone;
    visitor.setAutoJustByTimezone(autoJustTimezone);
    TupleFilter filter = this.condition.accept(visitor);

    // optimize the filter; the optimization has to be segment-irrelevant
    filter = new FilterOptimizeTransformer().transform(filter);

    Set<TblColRef> filterColumns = Sets.newHashSet();
    TupleFilter.collectColumns(filter, filterColumns);
    for (TblColRef tblColRef : filterColumns) {
        if (!tblColRef.isInnerColumn() && context.belongToContextTables(tblColRef)) {
            context.allColumns.add(tblColRef);
            context.filterColumns.add(tblColRef);
        }
    }

    context.filter = and(context.filter, filter);
}
 
Example 2
Source File: OLAPFilterRel.java    From kylin with Apache License 2.0
void translateFilter(OLAPContext context) {
    if (this.condition == null) {
        return;
    }

    TupleFilterVisitor visitor = new TupleFilterVisitor(this.columnRowType);
    boolean isRealtimeTable = columnRowType.getColumnByIndex(0).getColumnDesc().getTable().isStreamingTable();
    autoJustTimezone = isRealtimeTable && autoJustTimezone;
    visitor.setAutoJustByTimezone(autoJustTimezone);
    TupleFilter filter = this.condition.accept(visitor);

    // optimize the filter; the optimization has to be segment-irrelevant
    filter = new FilterOptimizeTransformer().transform(filter);

    Set<TblColRef> filterColumns = Sets.newHashSet();
    TupleFilter.collectColumns(filter, filterColumns);
    for (TblColRef tblColRef : filterColumns) {
        if (!tblColRef.isInnerColumn() && context.belongToContextTables(tblColRef)) {
            context.allColumns.add(tblColRef);
            context.filterColumns.add(tblColRef);
        }
    }

    context.filter = and(context.filter, filter);
}
 
Example 3
Source File: FragmentFileSearcher.java    From kylin with Apache License 2.0
@Override
public TupleFilter transform(TupleFilter filter) {
    if (filter.getOperator() == TupleFilter.FilterOperatorEnum.NOT
            && !TupleFilter.isEvaluableRecursively(filter)) {
        TupleFilter.collectColumns(filter, unEvaluableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    // shortcut for unEvaluatable filter
    if (!filter.isEvaluable()) {
        TupleFilter.collectColumns(filter, unEvaluableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    if (filter instanceof CompareTupleFilter) {
        return translateCompareFilter((CompareTupleFilter) filter);
    } else if (filter instanceof LogicalTupleFilter) {
        @SuppressWarnings("unchecked")
        ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) filter.getChildren().listIterator();
        while (childIterator.hasNext()) {
            TupleFilter transformed = transform(childIterator.next());
            if (transformed != null) {
                childIterator.set(transformed);
            } else {
                throw new IllegalStateException("Should not be null");
            }
        }
    }
    return filter;
}
 
Example 4
Source File: StreamingBuiltInFunctionTransformer.java    From kylin with Apache License 2.0
@Override
public TupleFilter transform(TupleFilter tupleFilter) {
    TupleFilter translated = null;
    if (tupleFilter instanceof CompareTupleFilter) {
        //normal case
        translated = translateCompareTupleFilter((CompareTupleFilter) tupleFilter);
        if (translated != null) {
            logger.info("Translated {" + tupleFilter + "}");
        }
    } else if (tupleFilter instanceof BuiltInFunctionTupleFilter) {
        //like case
        translated = translateFunctionTupleFilter((BuiltInFunctionTupleFilter) tupleFilter);
        if (translated != null) {
            logger.info("Translated {" + tupleFilter + "}");
        }
    } else if (tupleFilter instanceof LogicalTupleFilter) {
        @SuppressWarnings("unchecked")
        ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren()
                .listIterator();
        while (childIterator.hasNext()) {
            TupleFilter transformed = transform(childIterator.next());
            if (transformed != null)
                childIterator.set(transformed);
        }
    }

    TupleFilter result = translated == null ? tupleFilter : translated;
    if (result.getOperator() == TupleFilter.FilterOperatorEnum.NOT
            && !TupleFilter.isEvaluableRecursively(result)) {
        TupleFilter.collectColumns(result, unEvaluableColumns);
        return ConstantTupleFilter.TRUE;
    }

    // shortcut for unEvaluatable filter
    if (!result.isEvaluable()) {
        TupleFilter.collectColumns(result, unEvaluableColumns);
        return ConstantTupleFilter.TRUE;
    }
    return result;
}
 
Example 5
Source File: GTFilterScanner.java    From kylin with Apache License 2.0
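// Collect every column referenced by the filter and flag each one's zero-based column index in a bitset.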
private ImmutableBitSet collectColumnsInFilter(TupleFilter filter) {
    Set<TblColRef> columnsInFilter = new HashSet<>();
    TupleFilter.collectColumns(filter, columnsInFilter);
    BitSet result = new BitSet();
    for (TblColRef col : columnsInFilter)
        result.set(col.getColumnDesc().getZeroBasedIndex());
    return new ImmutableBitSet(result);
}
 
Example 6
Source File: GTUtil.java    From kylin with Apache License 2.0
@Override
public TupleFilter onSerialize(TupleFilter filter) {
    if (filter == null)
        return null;

    // In case of NOT(unEvaluatableFilter), we should immediately replace it with TRUE;
    // otherwise, unEvaluatableFilter will later be replaced with TRUE and NOT(unEvaluatableFilter)
    // will always return FALSE.
    if (filter.getOperator() == FilterOperatorEnum.NOT && !TupleFilter.isEvaluableRecursively(filter)) {
        TupleFilter.collectColumns(filter, unevaluatableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    // shortcut for unEvaluatable filter
    if (!filter.isEvaluable()) {
        TupleFilter.collectColumns(filter, unevaluatableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    // map the column onto the grid table
    if (colMapping != null && filter instanceof ColumnTupleFilter) {
        ColumnTupleFilter colFilter = (ColumnTupleFilter) filter;
        int gtColIdx = mapCol(colFilter.getColumn());
        return new ColumnTupleFilter(info.colRef(gtColIdx));
    }

    // encode constants
    if (useEncodeConstants && filter instanceof CompareTupleFilter) {
        return encodeConstants((CompareTupleFilter) filter);
    }

    return filter;
}
 
Example 7
Source File: GTCubeStorageQueryBase.java    From kylin with Apache License 2.0
private TupleFilter checkHavingCanPushDown(TupleFilter havingFilter, Set<TblColRef> groupsD,
        List<FunctionDesc> aggregations, Set<FunctionDesc> metrics) {
    // must have only one segment
    Segments<CubeSegment> readySegs = cubeInstance.getSegments(SegmentStatusEnum.READY);
    if (readySegs.size() != 1) {
        logger.info("Can not push down having filter, must have only one segment");
        return null;
    }
    // shard-by columns must be in the group by
    CubeDesc desc = cubeInstance.getDescriptor();
    Set<TblColRef> shardBy = desc.getShardByColumns();
    if (groupsD == null || shardBy.isEmpty() || !groupsD.containsAll(shardBy))
        return null;

    // OK, push down
    logger.info("Push down having filter {}", havingFilter);

    // convert columns in the filter
    Set<TblColRef> aggrOutCols = new HashSet<>();
    TupleFilter.collectColumns(havingFilter, aggrOutCols);

    for (TblColRef aggrOutCol : aggrOutCols) {
        int aggrIdxOnSql = aggrOutCol.getColumnDesc().getZeroBasedIndex(); // aggr index marked in OLAPAggregateRel
        FunctionDesc aggrFunc = aggregations.get(aggrIdxOnSql);

        // calculate the index of this aggr among all the metrics that are sent to storage
        int aggrIdxAmongMetrics = 0;
        for (MeasureDesc m : cubeDesc.getMeasures()) {
            if (aggrFunc.equals(m.getFunction()))
                break;
            if (metrics.contains(m.getFunction()))
                aggrIdxAmongMetrics++;
        }
        aggrOutCol.getColumnDesc().setId("" + (aggrIdxAmongMetrics + 1));
    }
    return havingFilter;
}
 
Example 8
Source File: ExpressionColCollector.java    From kylin with Apache License 2.0
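// For a CASE expression, collect the columns of each WHEN filter when filter columns are requested, and always visit the value expressions and the ELSE branch.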
@Override
public TupleExpression visitCaseCall(CaseTupleExpression caseExpr) {
    for (Pair<TupleFilter, TupleExpression> entry : caseExpr.getWhenList()) {
        TupleFilter filter = entry.getFirst();
        if (ifFCols) {
            TupleFilter.collectColumns(filter, filterColumns);
        }

        entry.getSecond().accept(this);
    }
    if (caseExpr.getElseExpr() != null) {
        caseExpr.getElseExpr().accept(this);
    }
    return caseExpr;
}
 
Example 9
Source File: FragmentFileSearcher.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public TupleFilter transform(TupleFilter filter) {
    if (filter.getOperator() == TupleFilter.FilterOperatorEnum.NOT
            && !TupleFilter.isEvaluableRecursively(filter)) {
        TupleFilter.collectColumns(filter, unEvaluableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    // shortcut for unEvaluatable filter
    if (!filter.isEvaluable()) {
        TupleFilter.collectColumns(filter, unEvaluableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    if (filter instanceof CompareTupleFilter) {
        return translateCompareFilter((CompareTupleFilter) filter);
    } else if (filter instanceof LogicalTupleFilter) {
        @SuppressWarnings("unchecked")
        ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) filter.getChildren().listIterator();
        while (childIterator.hasNext()) {
            TupleFilter transformed = transform(childIterator.next());
            if (transformed != null) {
                childIterator.set(transformed);
            } else {
                throw new IllegalStateException("Should not be null");
            }
        }
    }
    return filter;
}
 
Example 10
Source File: StreamingBuiltInFunctionTransformer.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public TupleFilter transform(TupleFilter tupleFilter) {
    TupleFilter translated = null;
    if (tupleFilter instanceof CompareTupleFilter) {
        //normal case
        translated = translateCompareTupleFilter((CompareTupleFilter) tupleFilter);
        if (translated != null) {
            logger.info("Translated {" + tupleFilter + "}");
        }
    } else if (tupleFilter instanceof BuiltInFunctionTupleFilter) {
        //like case
        translated = translateFunctionTupleFilter((BuiltInFunctionTupleFilter) tupleFilter);
        if (translated != null) {
            logger.info("Translated {" + tupleFilter + "}");
        }
    } else if (tupleFilter instanceof LogicalTupleFilter) {
        @SuppressWarnings("unchecked")
        ListIterator<TupleFilter> childIterator = (ListIterator<TupleFilter>) tupleFilter.getChildren()
                .listIterator();
        while (childIterator.hasNext()) {
            TupleFilter transformed = transform(childIterator.next());
            if (transformed != null)
                childIterator.set(transformed);
        }
    }

    TupleFilter result = translated == null ? tupleFilter : translated;
    if (result.getOperator() == TupleFilter.FilterOperatorEnum.NOT
            && !TupleFilter.isEvaluableRecursively(result)) {
        TupleFilter.collectColumns(result, unEvaluableColumns);
        return ConstantTupleFilter.TRUE;
    }

    // shortcut for unEvaluatable filter
    if (!result.isEvaluable()) {
        TupleFilter.collectColumns(result, unEvaluableColumns);
        return ConstantTupleFilter.TRUE;
    }
    return result;
}
 
Example 11
Source File: GTFilterScanner.java    From kylin-on-parquet-v2 with Apache License 2.0
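// Collect every column referenced by the filter and flag each one's zero-based column index in a bitset.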
private ImmutableBitSet collectColumnsInFilter(TupleFilter filter) {
    Set<TblColRef> columnsInFilter = new HashSet<>();
    TupleFilter.collectColumns(filter, columnsInFilter);
    BitSet result = new BitSet();
    for (TblColRef col : columnsInFilter)
        result.set(col.getColumnDesc().getZeroBasedIndex());
    return new ImmutableBitSet(result);
}
 
Example 12
Source File: GTUtil.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public TupleFilter onSerialize(TupleFilter filter) {
    if (filter == null)
        return null;

    // In case of NOT(unEvaluatableFilter), we should immediately replace it with TRUE;
    // otherwise, unEvaluatableFilter will later be replaced with TRUE and NOT(unEvaluatableFilter)
    // will always return FALSE.
    if (filter.getOperator() == FilterOperatorEnum.NOT && !TupleFilter.isEvaluableRecursively(filter)) {
        TupleFilter.collectColumns(filter, unevaluatableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    // shortcut for unEvaluatable filter
    if (!filter.isEvaluable()) {
        TupleFilter.collectColumns(filter, unevaluatableColumnCollector);
        return ConstantTupleFilter.TRUE;
    }

    // map the column onto the grid table
    if (colMapping != null && filter instanceof ColumnTupleFilter) {
        ColumnTupleFilter colFilter = (ColumnTupleFilter) filter;
        int gtColIdx = mapCol(colFilter.getColumn());
        return new ColumnTupleFilter(info.colRef(gtColIdx));
    }

    // encode constants
    if (useEncodeConstants && filter instanceof CompareTupleFilter) {
        return encodeConstants((CompareTupleFilter) filter);
    }

    return filter;
}
 
Example 13
Source File: GTCubeStorageQueryBase.java    From kylin-on-parquet-v2 with Apache License 2.0
private TupleFilter checkHavingCanPushDown(TupleFilter havingFilter, Set<TblColRef> groupsD,
        List<FunctionDesc> aggregations, Set<FunctionDesc> metrics) {
    // must have only one segment
    Segments<CubeSegment> readySegs = cubeInstance.getSegments(SegmentStatusEnum.READY);
    if (readySegs.size() != 1) {
        logger.info("Can not push down having filter, must have only one segment");
        return null;
    }
    // shard-by columns must be in the group by
    CubeDesc desc = cubeInstance.getDescriptor();
    Set<TblColRef> shardBy = desc.getShardByColumns();
    if (groupsD == null || shardBy.isEmpty() || !groupsD.containsAll(shardBy))
        return null;

    // OK, push down
    logger.info("Push down having filter {}", havingFilter);

    // convert columns in the filter
    Set<TblColRef> aggrOutCols = new HashSet<>();
    TupleFilter.collectColumns(havingFilter, aggrOutCols);

    for (TblColRef aggrOutCol : aggrOutCols) {
        int aggrIdxOnSql = aggrOutCol.getColumnDesc().getZeroBasedIndex(); // aggr index marked in OLAPAggregateRel
        FunctionDesc aggrFunc = aggregations.get(aggrIdxOnSql);

        // calculate the index of this aggr among all the metrics that are sent to storage
        int aggrIdxAmongMetrics = 0;
        for (MeasureDesc m : cubeDesc.getMeasures()) {
            if (aggrFunc.equals(m.getFunction()))
                break;
            if (metrics.contains(m.getFunction()))
                aggrIdxAmongMetrics++;
        }
        aggrOutCol.getColumnDesc().setId("" + (aggrIdxAmongMetrics + 1));
    }
    return havingFilter;
}
 
Example 14
Source File: ExpressionColCollector.java    From kylin-on-parquet-v2 with Apache License 2.0
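// For a CASE expression, collect the columns of each WHEN filter when filter columns are requested, and always visit the value expressions and the ELSE branch.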
@Override
public TupleExpression visitCaseCall(CaseTupleExpression caseExpr) {
    for (Pair<TupleFilter, TupleExpression> entry : caseExpr.getWhenList()) {
        TupleFilter filter = entry.getFirst();
        if (ifFCols) {
            TupleFilter.collectColumns(filter, filterColumns);
        }

        entry.getSecond().accept(this);
    }
    if (caseExpr.getElseExpr() != null) {
        caseExpr.getElseExpr().accept(this);
    }
    return caseExpr;
}
 
Example 15
Source File: GTCubeStorageQueryBase.java    From kylin with Apache License 2.0
public GTCubeStorageQueryRequest getStorageQueryRequest(StorageContext context, SQLDigest sqlDigest,
        TupleInfo returnTupleInfo) {
    context.setStorageQuery(this);

    //cope with queries with no aggregations
    RawQueryLastHacker.hackNoAggregations(sqlDigest, cubeDesc, returnTupleInfo);

    // Customized measure taking effect: e.g. allow custom measures to help raw queries
    notifyBeforeStorageQuery(sqlDigest);

    Collection<TblColRef> groups = sqlDigest.groupbyColumns;
    TupleFilter filter = sqlDigest.filter;

    // build dimension & metrics
    Set<TblColRef> dimensions = new LinkedHashSet<>();
    Set<FunctionDesc> metrics = new LinkedHashSet<>();
    buildDimensionsAndMetrics(sqlDigest, dimensions, metrics);

    // all dimensions = groups + other(like filter) dimensions
    Set<TblColRef> otherDims = Sets.newHashSet(dimensions);
    otherDims.removeAll(groups);

    // expand derived (xxxD means host columns only; derived columns have been translated)
    Set<TblColRef> derivedPostAggregation = Sets.newHashSet();
    Set<TblColRef> groupsD = expandDerived(groups, derivedPostAggregation);
    Set<TblColRef> otherDimsD = expandDerived(otherDims, derivedPostAggregation);
    otherDimsD.removeAll(groupsD);

    // identify cuboid
    Set<TblColRef> dimensionsD = new LinkedHashSet<>();
    dimensionsD.addAll(groupsD);
    dimensionsD.addAll(otherDimsD);
    Cuboid cuboid = findCuboid(cubeInstance, dimensionsD, metrics);
    context.setCuboid(cuboid);

    // set cuboid to GridTable mapping
    boolean noDynamicCols;
    // dynamic dimensions
    List<TblColRef> dynGroups = Lists.newArrayList(sqlDigest.dynGroupbyColumns.keySet());
    noDynamicCols = dynGroups.isEmpty();
    List<TupleExpression> dynGroupExprs = Lists.newArrayListWithExpectedSize(sqlDigest.dynGroupbyColumns.size());
    for (TblColRef dynGroupCol : dynGroups) {
        dynGroupExprs.add(sqlDigest.dynGroupbyColumns.get(dynGroupCol));
    }
    // dynamic measures
    List<DynamicFunctionDesc> dynFuncs = sqlDigest.dynAggregations;
    noDynamicCols = noDynamicCols && dynFuncs.isEmpty();

    CuboidToGridTableMapping mapping = noDynamicCols ? new CuboidToGridTableMapping(cuboid)
            : new CuboidToGridTableMappingExt(cuboid, dynGroups, dynFuncs);
    context.setMapping(mapping);

    // set whether to aggr at storage
    Set<TblColRef> singleValuesD = findSingleValueColumns(filter);
    context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD));

    // exactAggregation means no aggregation is needed at either the storage or the query engine.
    boolean exactAggregation = isExactAggregation(context, cuboid, groups, otherDimsD, singleValuesD,
            derivedPostAggregation, sqlDigest.aggregations, sqlDigest.aggrSqlCalls, sqlDigest.groupByExpression);
    context.setExactAggregation(exactAggregation);

    // replace derived columns in filter with host columns; columns on loosened condition must be added to group by
    Set<TblColRef> loosenedColumnD = Sets.newHashSet();
    Set<TblColRef> filterColumnD = Sets.newHashSet();
    TupleFilter filterD = translateDerived(filter, loosenedColumnD);
    groupsD.addAll(loosenedColumnD);
    TupleFilter.collectColumns(filterD, filterColumnD);
    context.setFilterMask(getQueryFilterMask(filterColumnD));

    // set limit push down
    enableStorageLimitIfPossible(cuboid, groups, dynGroups, derivedPostAggregation, groupsD, filterD,
            loosenedColumnD, sqlDigest, context);
    // set whether to aggregate results from multiple partitions
    enableStreamAggregateIfBeneficial(cuboid, groupsD, context);
    // check query deadline
    QueryContextFacade.current().checkMillisBeforeDeadline();

    // push down having clause filter if possible
    TupleFilter havingFilter = checkHavingCanPushDown(sqlDigest.havingFilter, groupsD, sqlDigest.aggregations,
            metrics);

    logger.info(
            "Cuboid identified: cube={}, cuboidId={}, groupsD={}, filterD={}, limitPushdown={}, limitLevel={}, storageAggr={}",
            cubeInstance.getName(), cuboid.getId(), groupsD, filterColumnD, context.getFinalPushDownLimit(),
            context.getStorageLimitLevel(), context.isNeedStorageAggregation());

    return new GTCubeStorageQueryRequest(cuboid, dimensionsD, groupsD, dynGroups, dynGroupExprs, filterColumnD,
            metrics, dynFuncs, filterD, havingFilter, context);
}
 
Example 16
Source File: FilterDecorator.java    From kylin with Apache License 2.0
@Override
public TupleFilter onSerialize(TupleFilter filter) {
    if (filter == null)
        return null;

    BuiltInFunctionTransformer translator = new BuiltInFunctionTransformer(dimEncMap);
    filter = translator.transform(filter);

    // un-evaluatable filter is replaced with TRUE
    if (!filter.isEvaluable()) {
        TupleFilter.collectColumns(filter, inevaluableColumns);
        return ConstantTupleFilter.TRUE;
    }

    if (!(filter instanceof CompareTupleFilter))
        return filter;

    // double check that everything inside the CompareTupleFilter is evaluatable
    if (!TupleFilter.isEvaluableRecursively(filter)) {
        TupleFilter.collectColumns(filter, inevaluableColumns);
        return ConstantTupleFilter.TRUE;
    }

    if (filterConstantsTreatment == FilterConstantsTreatment.AS_IT_IS) {
        return filter;
    } else {

        // extract ColumnFilter & ConstantFilter
        CompareTupleFilter compareFilter = (CompareTupleFilter) filter;
        TblColRef col = compareFilter.getColumn();

        if (col == null) {
            return filter;
        }

        Collection<String> constValues = (Collection<String>) compareFilter.getValues();
        if (constValues == null || constValues.isEmpty()) {
            return filter;
        }

        CompareTupleFilter newCompareFilter = new CompareTupleFilter(compareFilter.getOperator());
        newCompareFilter.addChild(new ColumnTupleFilter(col));

        if (filterConstantsTreatment == FilterConstantsTreatment.REPLACE_WITH_GLOBAL_DICT) {
            return replaceConstantsWithGlobalDict(compareFilter, newCompareFilter);
        } else if (filterConstantsTreatment == FilterConstantsTreatment.REPLACE_WITH_LOCAL_DICT) {
            return replaceConstantsWithLocalDict(compareFilter, newCompareFilter);
        } else {
            throw new RuntimeException("should not reach here");
        }
    }
}
 
Example 17
Source File: FilterDecorator.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public TupleFilter onSerialize(TupleFilter filter) {
    if (filter == null)
        return null;

    BuiltInFunctionTransformer translator = new BuiltInFunctionTransformer(dimEncMap);
    filter = translator.transform(filter);

    // un-evaluatable filter is replaced with TRUE
    if (!filter.isEvaluable()) {
        TupleFilter.collectColumns(filter, inevaluableColumns);
        return ConstantTupleFilter.TRUE;
    }

    if (!(filter instanceof CompareTupleFilter))
        return filter;

    // double check that everything inside the CompareTupleFilter is evaluatable
    if (!TupleFilter.isEvaluableRecursively(filter)) {
        TupleFilter.collectColumns(filter, inevaluableColumns);
        return ConstantTupleFilter.TRUE;
    }

    if (filterConstantsTreatment == FilterConstantsTreatment.AS_IT_IS) {
        return filter;
    } else {

        // extract ColumnFilter & ConstantFilter
        CompareTupleFilter compareFilter = (CompareTupleFilter) filter;
        TblColRef col = compareFilter.getColumn();

        if (col == null) {
            return filter;
        }

        Collection<String> constValues = (Collection<String>) compareFilter.getValues();
        if (constValues == null || constValues.isEmpty()) {
            return filter;
        }

        CompareTupleFilter newCompareFilter = new CompareTupleFilter(compareFilter.getOperator());
        newCompareFilter.addChild(new ColumnTupleFilter(col));

        if (filterConstantsTreatment == FilterConstantsTreatment.REPLACE_WITH_GLOBAL_DICT) {
            return replaceConstantsWithGlobalDict(compareFilter, newCompareFilter);
        } else if (filterConstantsTreatment == FilterConstantsTreatment.REPLACE_WITH_LOCAL_DICT) {
            return replaceConstantsWithLocalDict(compareFilter, newCompareFilter);
        } else {
            throw new RuntimeException("should not reach here");
        }
    }
}
 
Example 18
Source File: GTCubeStorageQueryBase.java    From kylin-on-parquet-v2 with Apache License 2.0
public GTCubeStorageQueryRequest getStorageQueryRequest(StorageContext context, SQLDigest sqlDigest,
        TupleInfo returnTupleInfo) {
    context.setStorageQuery(this);

    //cope with queries with no aggregations
    RawQueryLastHacker.hackNoAggregations(sqlDigest, cubeDesc, returnTupleInfo);

    // Customized measure taking effect: e.g. allow custom measures to help raw queries
    notifyBeforeStorageQuery(sqlDigest);

    Collection<TblColRef> groups = sqlDigest.groupbyColumns;
    TupleFilter filter = sqlDigest.filter;

    // build dimension & metrics
    Set<TblColRef> dimensions = new LinkedHashSet<>();
    Set<FunctionDesc> metrics = new LinkedHashSet<>();
    buildDimensionsAndMetrics(sqlDigest, dimensions, metrics);

    // all dimensions = groups + other(like filter) dimensions
    Set<TblColRef> otherDims = Sets.newHashSet(dimensions);
    otherDims.removeAll(groups);

    // expand derived (xxxD means host columns only; derived columns have been translated)
    Set<TblColRef> derivedPostAggregation = Sets.newHashSet();
    Set<TblColRef> groupsD = expandDerived(groups, derivedPostAggregation);
    Set<TblColRef> otherDimsD = expandDerived(otherDims, derivedPostAggregation);
    otherDimsD.removeAll(groupsD);

    // identify cuboid
    Set<TblColRef> dimensionsD = new LinkedHashSet<>();
    dimensionsD.addAll(groupsD);
    dimensionsD.addAll(otherDimsD);
    Cuboid cuboid = findCuboid(cubeInstance, dimensionsD, metrics);
    context.setCuboid(cuboid);

    // set cuboid to GridTable mapping
    boolean noDynamicCols;
    // dynamic dimensions
    List<TblColRef> dynGroups = Lists.newArrayList(sqlDigest.dynGroupbyColumns.keySet());
    noDynamicCols = dynGroups.isEmpty();
    List<TupleExpression> dynGroupExprs = Lists.newArrayListWithExpectedSize(sqlDigest.dynGroupbyColumns.size());
    for (TblColRef dynGroupCol : dynGroups) {
        dynGroupExprs.add(sqlDigest.dynGroupbyColumns.get(dynGroupCol));
    }
    // dynamic measures
    List<DynamicFunctionDesc> dynFuncs = sqlDigest.dynAggregations;
    noDynamicCols = noDynamicCols && dynFuncs.isEmpty();

    CuboidToGridTableMapping mapping = noDynamicCols ? new CuboidToGridTableMapping(cuboid)
            : new CuboidToGridTableMappingExt(cuboid, dynGroups, dynFuncs);
    context.setMapping(mapping);

    // set whether to aggr at storage
    Set<TblColRef> singleValuesD = findSingleValueColumns(filter);
    context.setNeedStorageAggregation(isNeedStorageAggregation(cuboid, groupsD, singleValuesD));

    // exactAggregation means no aggregation is needed at either the storage or the query engine.
    boolean exactAggregation = isExactAggregation(context, cuboid, groups, otherDimsD, singleValuesD,
            derivedPostAggregation, sqlDigest.aggregations, sqlDigest.aggrSqlCalls, sqlDigest.groupByExpression);
    context.setExactAggregation(exactAggregation);

    // replace derived columns in filter with host columns; columns on loosened condition must be added to group by
    Set<TblColRef> loosenedColumnD = Sets.newHashSet();
    Set<TblColRef> filterColumnD = Sets.newHashSet();
    TupleFilter filterD = translateDerived(filter, loosenedColumnD);
    groupsD.addAll(loosenedColumnD);
    TupleFilter.collectColumns(filterD, filterColumnD);
    context.setFilterMask(getQueryFilterMask(filterColumnD));

    // set limit push down
    enableStorageLimitIfPossible(cuboid, groups, dynGroups, derivedPostAggregation, groupsD, filterD,
            loosenedColumnD, sqlDigest, context);
    // set whether to aggregate results from multiple partitions
    enableStreamAggregateIfBeneficial(cuboid, groupsD, context);
    // check query deadline
    QueryContextFacade.current().checkMillisBeforeDeadline();

    // push down having clause filter if possible
    TupleFilter havingFilter = checkHavingCanPushDown(sqlDigest.havingFilter, groupsD, sqlDigest.aggregations,
            metrics);

    logger.info(
            "Cuboid identified: cube={}, cuboidId={}, groupsD={}, filterD={}, limitPushdown={}, limitLevel={}, storageAggr={}",
            cubeInstance.getName(), cuboid.getId(), groupsD, filterColumnD, context.getFinalPushDownLimit(),
            context.getStorageLimitLevel(), context.isNeedStorageAggregation());

    return new GTCubeStorageQueryRequest(cuboid, dimensionsD, groupsD, dynGroups, dynGroupExprs, filterColumnD,
            metrics, dynFuncs, filterD, havingFilter, context);
}