Java Code Examples for org.apache.kylin.metadata.model.FunctionDesc#getRewriteFieldName()

The following examples show how to use org.apache.kylin.metadata.model.FunctionDesc#getRewriteFieldName(). The originating project and source file are noted above each example.
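Before the examples, here is a minimal sketch (not taken from the Kylin sources) of the recurring pattern: a measure that needs rewriting is looked up in a TupleInfo by the name returned from getRewriteFieldName(), while a non-rewrite measure is resolved through its parameter column. The helper name resolveMetricIndex is hypothetical; the FunctionDesc and TupleInfo arguments are assumed to come from an existing query context, as in the tuple-converter examples further down.

// Hypothetical helper, mirroring the CubeTupleConverter/StreamingTupleConverter examples below.
// FunctionDesc and TblColRef come from org.apache.kylin.metadata.model,
// TupleInfo from org.apache.kylin.metadata.tuple.
int resolveMetricIndex(FunctionDesc metric, TupleInfo tupleInfo) {
    if (metric.needRewrite()) {
        // a rewrite measure is exposed as an extra field named by getRewriteFieldName()
        String rewriteFieldName = metric.getRewriteFieldName();
        return tupleInfo.hasField(rewriteFieldName) ? tupleInfo.getFieldIndex(rewriteFieldName) : -1;
    } else {
        // a non-rewrite metric is resolved like an ordinary dimension column
        TblColRef col = metric.getParameter().getColRefs().get(0);
        return tupleInfo.hasColumn(col) ? tupleInfo.getColumnIndex(col) : -1;
    }
}

Returning -1 when the field or column is absent follows the convention used by the tuple converters in the examples below.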
Example 1
Source File: OLAPTable.java    From Kylin with Apache License 2.0
private List<ColumnDesc> listSourceColumns() {
    ProjectManager mgr = ProjectManager.getInstance(olapSchema.getConfig());
    List<ColumnDesc> exposedColumns = Lists.newArrayList(mgr.listExposedColumns(olapSchema.getProjectName(), sourceTable.getIdentity()));

    List<MeasureDesc> countMeasures = mgr.listEffectiveRewriteMeasures(olapSchema.getProjectName(), sourceTable.getIdentity());
    HashSet<String> metFields = new HashSet<String>();
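    // expose each effective rewrite measure as an extra (fake) column named by its rewrite field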
    for (MeasureDesc m : countMeasures) {
        FunctionDesc func = m.getFunction();
        String fieldName = func.getRewriteFieldName();
        if (!metFields.contains(fieldName)) {
            metFields.add(fieldName);
            ColumnDesc fakeCountCol = new ColumnDesc();
            fakeCountCol.setName(fieldName);
            fakeCountCol.setDatatype(func.getSQLType());
            fakeCountCol.setNullable(false);
            fakeCountCol.init(sourceTable);
            exposedColumns.add(fakeCountCol);
        }
    }

    return exposedColumns;
}
 
Example 2
Source File: OLAPAggregateRel.java    From kylin-on-parquet-v2 with Apache License 2.0
TblColRef buildRewriteColumn(FunctionDesc aggFunc) {
    TblColRef colRef;
    if (aggFunc.needRewriteField()) {
        String colName = aggFunc.getRewriteFieldName();
        colRef = this.context.firstTableScan.makeRewriteColumn(colName);
    } else {
        throw new IllegalStateException("buildRewriteColumn on an aggFunc that does not need rewrite: " + aggFunc);
    }
    return colRef;
}
 
Example 3
Source File: OLAPTable.java    From kylin-on-parquet-v2 with Apache License 2.0
private List<ColumnDesc> listSourceColumns() {
    ProjectManager mgr = ProjectManager.getInstance(olapSchema.getConfig());

    List<ColumnDesc> tableColumns = mgr.listExposedColumns(olapSchema.getProjectName(), sourceTable, exposeMore);

    List<ColumnDesc> metricColumns = Lists.newArrayList();
    List<MeasureDesc> countMeasures = mgr.listEffectiveRewriteMeasures(olapSchema.getProjectName(),
            sourceTable.getIdentity());
    HashSet<String> metFields = new HashSet<String>();
    for (MeasureDesc m : countMeasures) {

        FunctionDesc func = m.getFunction();
        String fieldName = func.getRewriteFieldName();
        if (!metFields.contains(fieldName)) {
            metFields.add(fieldName);
            ColumnDesc fakeCountCol = func.newFakeRewriteColumn(sourceTable);
            metricColumns.add(fakeCountCol);
        }
    }

    Collections.sort(tableColumns, new Comparator<ColumnDesc>() {
        @Override
        public int compare(ColumnDesc o1, ColumnDesc o2) {
            return o1.getZeroBasedIndex() - o2.getZeroBasedIndex();
        }
    });
    return Lists.newArrayList(Iterables.concat(tableColumns, metricColumns));
}
 
Example 4
Source File: OLAPAggregateRel.java    From kylin with Apache License 2.0
TblColRef buildRewriteColumn(FunctionDesc aggFunc) {
    TblColRef colRef;
    if (aggFunc.needRewriteField()) {
        String colName = aggFunc.getRewriteFieldName();
        colRef = this.context.firstTableScan.makeRewriteColumn(colName);
    } else {
        throw new IllegalStateException("buildRewriteColumn on an aggFunc that does not need rewrite: " + aggFunc);
    }
    return colRef;
}
 
Example 5
Source File: OLAPTable.java    From kylin with Apache License 2.0
private List<ColumnDesc> listSourceColumns() {
    ProjectManager mgr = ProjectManager.getInstance(olapSchema.getConfig());

    List<ColumnDesc> tableColumns = mgr.listExposedColumns(olapSchema.getProjectName(), sourceTable, exposeMore);

    List<ColumnDesc> metricColumns = Lists.newArrayList();
    List<MeasureDesc> countMeasures = mgr.listEffectiveRewriteMeasures(olapSchema.getProjectName(),
            sourceTable.getIdentity());
    HashSet<String> metFields = new HashSet<String>();
    for (MeasureDesc m : countMeasures) {

        FunctionDesc func = m.getFunction();
        String fieldName = func.getRewriteFieldName();
        if (!metFields.contains(fieldName)) {
            metFields.add(fieldName);
            ColumnDesc fakeCountCol = func.newFakeRewriteColumn(sourceTable);
            metricColumns.add(fakeCountCol);
        }
    }

    Collections.sort(tableColumns, new Comparator<ColumnDesc>() {
        @Override
        public int compare(ColumnDesc o1, ColumnDesc o2) {
            return o1.getZeroBasedIndex() - o2.getZeroBasedIndex();
        }
    });
    return Lists.newArrayList(Iterables.concat(tableColumns, metricColumns));
}
 
Example 6
Source File: OLAPAggregateRel.java    From Kylin with Apache License 2.0
private AggregateCall rewriteAggregateCall(AggregateCall aggCall, FunctionDesc func) {
    // rebuild parameters
    List<Integer> newArgList = new ArrayList<Integer>(1);
    String fieldName = func.getRewriteFieldName();
    RelDataTypeField field = getChild().getRowType().getField(fieldName, true);
    newArgList.add(field.getIndex());

    // rebuild function
    RelDataType fieldType = aggCall.getType();
    Aggregation newAgg = aggCall.getAggregation();
    if (func.isCountDistinct()) {
        newAgg = createHyperLogLogAggFunction(fieldType);
    } else if (func.isCount()) {
        newAgg = new SqlSumEmptyIsZeroAggFunction(fieldType);
    }

    // rebuild aggregate call
    AggregateCall newAggCall = new AggregateCall(newAgg, false, newArgList, fieldType, newAgg.getName());

    // Make sure the specified type matches inferReturnType; otherwise there will be
    // an assertion failure in optiq because BIGINT != BIGINT NOT NULL.
    // Details: https://github.scm.corp.ebay.com/Kylin/Kylin/issues/323
    SqlAggFunction aggFunction = (SqlAggFunction) newAggCall.getAggregation();
    AggCallBinding callBinding = newAggCall.createBinding(this);
    RelDataType inferReturnType = aggFunction.inferReturnType(callBinding);

    return new AggregateCall(newAgg, false, newArgList, inferReturnType, newAgg.getName());
}
 
Example 7
Source File: OLAPAggregateRel.java    From kylin-on-parquet-v2 with Apache License 2.0
ColumnRowType buildColumnRowType() {
    buildGroups();
    buildAggregations();

    ColumnRowType inputColumnRowType = ((OLAPRel) getInput()).getColumnRowType();
    List<TblColRef> columns = new ArrayList<TblColRef>(this.rowType.getFieldCount());
    columns.addAll(this.groups);

    // Add group column indicators
    if (indicator) {
        final Set<String> containedNames = Sets.newHashSet();
        for (TblColRef groupCol : groups) {
            String base = "i$" + groupCol.getName();
            String name = base;
            int i = 0;
            while (containedNames.contains(name)) {
                name = base + "_" + i++;
            }
            containedNames.add(name);
            TblColRef indicatorCol = TblColRef.newInnerColumn(name, TblColRef.InnerDataTypeEnum.LITERAL);
            columns.add(indicatorCol);
        }
    }

    for (int i = 0; i < this.aggregations.size(); i++) {
        FunctionDesc aggFunc = this.aggregations.get(i);
        String aggOutName;
        if (aggFunc != null) {
            aggOutName = aggFunc.getRewriteFieldName();
        } else {
            AggregateCall aggCall = this.rewriteAggCalls.get(i);
            int index = aggCall.getArgList().get(0);
            aggOutName = getSqlFuncName(aggCall) + "_"
                    + inputColumnRowType.getColumnByIndex(index).getIdentity().replace('.', '_') + "_";
        }
        TblColRef aggOutCol = TblColRef.newInnerColumn(aggOutName, TblColRef.InnerDataTypeEnum.LITERAL);
        aggOutCol.getColumnDesc().setId("" + (i + 1)); // mark the index of aggregation
        columns.add(aggOutCol);
    }
    return new ColumnRowType(columns);
}
 
Example 8
Source File: TopNMeasureType.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo tupleInfo,
        Map<TblColRef, Dictionary<String>> dictionaryMap) {
    final List<TblColRef> literalCols = getTopNLiteralColumn(function);
    final TblColRef numericCol = getTopNNumericColumn(function);
    final int[] literalTupleIdx = new int[literalCols.size()];
    final DimensionEncoding[] dimensionEncodings = getDimensionEncodings(function, literalCols, dictionaryMap);
    for (int i = 0; i < literalCols.size(); i++) {
        TblColRef colRef = literalCols.get(i);
        literalTupleIdx[i] = tupleInfo.hasColumn(colRef) ? tupleInfo.getColumnIndex(colRef) : -1;
    }

    // for TopN, the aggr must be SUM
    final int numericTupleIdx;
    if (numericCol != null) {
        FunctionDesc sumFunc = FunctionDesc.newInstance(FunctionDesc.FUNC_SUM,
                ParameterDesc.newInstance(numericCol), numericCol.getType().toString());
        String sumFieldName = sumFunc.getRewriteFieldName();
        numericTupleIdx = tupleInfo.hasField(sumFieldName) ? tupleInfo.getFieldIndex(sumFieldName) : -1;
    } else {
        FunctionDesc countFunction = FunctionDesc.newInstance(FunctionDesc.FUNC_COUNT,
                ParameterDesc.newInstance("1"), "bigint");
        numericTupleIdx = tupleInfo.getFieldIndex(countFunction.getRewriteFieldName());
    }
    return new IAdvMeasureFiller() {
        private TopNCounter<ByteArray> topNCounter;
        private Iterator<Counter<ByteArray>> topNCounterIterator;
        private int expectRow = 0;

        @SuppressWarnings("unchecked")
        @Override
        public void reload(Object measureValue) {
            this.topNCounter = (TopNCounter<ByteArray>) measureValue;
            this.topNCounterIterator = topNCounter.iterator();
            this.expectRow = 0;
        }

        @Override
        public int getNumOfRows() {
            return topNCounter.size();
        }

        @Override
        public void fillTuple(Tuple tuple, int row) {
            if (expectRow++ != row)
                throw new IllegalStateException();

            Counter<ByteArray> counter = topNCounterIterator.next();
            int offset = counter.getItem().offset();
            for (int i = 0; i < dimensionEncodings.length; i++) {
                String colValue = dimensionEncodings[i].decode(counter.getItem().array(), offset,
                        dimensionEncodings[i].getLengthOfEncoding());
                tuple.setDimensionValue(literalTupleIdx[i], colValue);
                offset += dimensionEncodings[i].getLengthOfEncoding();
            }
            tuple.setMeasureValue(numericTupleIdx, counter.getCount());
        }
    };
}
 
Example 9
Source File: CubeTupleConverter.java    From kylin-on-parquet-v2 with Apache License 2.0
public CubeTupleConverter(CubeSegment cubeSeg, Cuboid cuboid, //
        Set<TblColRef> selectedDimensions, Set<FunctionDesc> selectedMetrics, int[] gtColIdx, TupleInfo returnTupleInfo) {
    this.cubeSeg = cubeSeg;
    this.cuboid = cuboid;
    this.gtColIdx = gtColIdx;
    this.tupleInfo = returnTupleInfo;
    this.derivedColFillers = Lists.newArrayList();

    nSelectedDims = selectedDimensions.size();
    tupleIdx = new int[selectedDimensions.size() + selectedMetrics.size()];

    // measure types don't have this many, but the aligned length makes programming easier
    measureTypes = new MeasureType[selectedDimensions.size() + selectedMetrics.size()];

    advMeasureFillers = Lists.newArrayListWithCapacity(1);
    advMeasureIndexInGTValues = Lists.newArrayListWithCapacity(1);
    usedLookupTables = Lists.newArrayList();
    eventTimezone = cubeSeg.getConfig().getStreamingDerivedTimeTimezone();
    autoJustByTimezone = eventTimezone.length() > 0
            && cubeSeg.getCubeDesc().getModel().getRootFactTable().getTableDesc().isStreamingTable();
    if (autoJustByTimezone) {
        logger.debug("Will ajust dimsension for Time Derived Column.");
        timeZoneOffset = TimeZone.getTimeZone(eventTimezone).getRawOffset();
    } else {
        timeZoneOffset = 0;
    }
    ////////////

    int i = 0;

    // pre-calculate dimension index mapping to tuple
    for (TblColRef dim : selectedDimensions) {
        tupleIdx[i] = tupleInfo.hasColumn(dim) ? tupleInfo.getColumnIndex(dim) : -1;
        if (TimeDerivedColumnType.isTimeDerivedColumn(dim.getName())
                && !TimeDerivedColumnType.isTimeDerivedColumnAboveDayLevel(dim.getName())) {
            timestampColumn.add(tupleIdx[i]);
        }
        i++;
    }

    for (FunctionDesc metric : selectedMetrics) {
        if (metric.needRewrite()) {
            String rewriteFieldName = metric.getRewriteFieldName();
            tupleIdx[i] = tupleInfo.hasField(rewriteFieldName) ? tupleInfo.getFieldIndex(rewriteFieldName) : -1;
        } else {
            // a non-rewrite metric (like SUM, or a dimension playing as a metric) is treated like a dimension column
            TblColRef col = metric.getParameter().getColRefs().get(0);
            tupleIdx[i] = tupleInfo.hasColumn(col) ? tupleInfo.getColumnIndex(col) : -1;
        }

        MeasureType<?> measureType = metric.getMeasureType();
        if (measureType.needAdvancedTupleFilling()) {
            Map<TblColRef, Dictionary<String>> dictionaryMap = buildDictionaryMap(measureType.getColumnsNeedDictionary(metric));
            advMeasureFillers.add(measureType.getAdvancedTupleFiller(metric, returnTupleInfo, dictionaryMap));
            advMeasureIndexInGTValues.add(i);
        } else {
            measureTypes[i] = measureType;
        }

        i++;
    }

    // prepare derived columns and filler
    Map<Array<TblColRef>, List<DeriveInfo>> hostToDerivedInfo = cuboid.getCubeDesc().getHostToDerivedInfo(cuboid.getColumns(), null);
    for (Entry<Array<TblColRef>, List<DeriveInfo>> entry : hostToDerivedInfo.entrySet()) {
        TblColRef[] hostCols = entry.getKey().data;
        for (DeriveInfo deriveInfo : entry.getValue()) {
            IDerivedColumnFiller filler = newDerivedColumnFiller(hostCols, deriveInfo);
            if (filler != null) {
                derivedColFillers.add(filler);
            }
        }
    }
}
 
Example 10
Source File: StreamingTupleConverter.java    From kylin-on-parquet-v2 with Apache License 2.0
public StreamingTupleConverter(ResponseResultSchema schema, TupleInfo returnTupleInfo) {
    this.tupleInfo = returnTupleInfo;
    dimCnt = schema.getDimensionCount();
    metricsCnt = schema.getMetricsCount();
    dimTupleIdx = new int[dimCnt];
    metricsTupleIdx = new int[metricsCnt];

    // measure types don't have this many, but the aligned length makes programming easier
    measureTypes = new MeasureType[metricsCnt];

    advMeasureFillers = Lists.newArrayListWithCapacity(1);
    advMeasureIndexInGTValues = Lists.newArrayListWithCapacity(1);

    int idx = 0;
    // pre-calculate dimension index mapping to tuple
    for (TblColRef dim : schema.getDimensions()) {
        dimTupleIdx[idx] = tupleInfo.hasColumn(dim) ? tupleInfo.getColumnIndex(dim) : -1;
        if (dim.getType().isDateTimeFamily() && TimeDerivedColumnType.isTimeDerivedColumn(dim.getName()))
            timestampColumn.add(dimTupleIdx[idx]);
        idx++;
    }

    idx = 0;
    for (FunctionDesc metric : schema.getMetrics()) {
        if (metric.needRewrite()) {
            String rewriteFieldName = metric.getRewriteFieldName();
            metricsTupleIdx[idx] = tupleInfo.hasField(rewriteFieldName) ? tupleInfo.getFieldIndex(rewriteFieldName) : -1;
        } else { // a non-rewrite metric (like SUM, or a dimension playing as a metric) is treated like a dimension column
            TblColRef col = metric.getParameter().getColRefs().get(0);
            metricsTupleIdx[idx] = tupleInfo.hasColumn(col) ? tupleInfo.getColumnIndex(col) : -1;
        }

        MeasureType<?> measureType = metric.getMeasureType();
        if (measureType.needAdvancedTupleFilling()) {
            advMeasureFillers.add(measureType.getAdvancedTupleFiller(metric, returnTupleInfo, null));
            advMeasureIndexInGTValues.add(idx);
        } else {
            measureTypes[idx] = measureType;
        }
        idx++;
    }
}
 
Example 11
Source File: OLAPAggregateRel.java    From kylin with Apache License 2.0
ColumnRowType buildColumnRowType() {
    buildGroups();
    buildAggregations();

    ColumnRowType inputColumnRowType = ((OLAPRel) getInput()).getColumnRowType();
    List<TblColRef> columns = new ArrayList<TblColRef>(this.rowType.getFieldCount());
    columns.addAll(this.groups);

    // Add group column indicators
    if (indicator) {
        final Set<String> containedNames = Sets.newHashSet();
        for (TblColRef groupCol : groups) {
            String base = "i$" + groupCol.getName();
            String name = base;
            int i = 0;
            while (containedNames.contains(name)) {
                name = base + "_" + i++;
            }
            containedNames.add(name);
            TblColRef indicatorCol = TblColRef.newInnerColumn(name, TblColRef.InnerDataTypeEnum.LITERAL);
            columns.add(indicatorCol);
        }
    }

    for (int i = 0; i < this.aggregations.size(); i++) {
        FunctionDesc aggFunc = this.aggregations.get(i);
        String aggOutName;
        if (aggFunc != null) {
            aggOutName = aggFunc.getRewriteFieldName();
        } else {
            AggregateCall aggCall = this.rewriteAggCalls.get(i);
            int index = aggCall.getArgList().get(0);
            aggOutName = getSqlFuncName(aggCall) + "_"
                    + inputColumnRowType.getColumnByIndex(index).getIdentity().replace('.', '_') + "_";
        }
        TblColRef aggOutCol = TblColRef.newInnerColumn(aggOutName, TblColRef.InnerDataTypeEnum.LITERAL);
        aggOutCol.getColumnDesc().setId("" + (i + 1)); // mark the index of aggregation
        columns.add(aggOutCol);
    }
    return new ColumnRowType(columns);
}
 
Example 12
Source File: TopNMeasureType.java    From kylin with Apache License 2.0
@Override
public IAdvMeasureFiller getAdvancedTupleFiller(FunctionDesc function, TupleInfo tupleInfo,
        Map<TblColRef, Dictionary<String>> dictionaryMap) {
    final List<TblColRef> literalCols = getTopNLiteralColumn(function);
    final TblColRef numericCol = getTopNNumericColumn(function);
    final int[] literalTupleIdx = new int[literalCols.size()];
    final DimensionEncoding[] dimensionEncodings = getDimensionEncodings(function, literalCols, dictionaryMap);
    for (int i = 0; i < literalCols.size(); i++) {
        TblColRef colRef = literalCols.get(i);
        literalTupleIdx[i] = tupleInfo.hasColumn(colRef) ? tupleInfo.getColumnIndex(colRef) : -1;
    }

    // for TopN, the aggr must be SUM
    final int numericTupleIdx;
    if (numericCol != null) {
        FunctionDesc sumFunc = FunctionDesc.newInstance(FunctionDesc.FUNC_SUM,
                ParameterDesc.newInstance(numericCol), numericCol.getType().toString());
        String sumFieldName = sumFunc.getRewriteFieldName();
        numericTupleIdx = tupleInfo.hasField(sumFieldName) ? tupleInfo.getFieldIndex(sumFieldName) : -1;
    } else {
        FunctionDesc countFunction = FunctionDesc.newInstance(FunctionDesc.FUNC_COUNT,
                ParameterDesc.newInstance("1"), "bigint");
        numericTupleIdx = tupleInfo.getFieldIndex(countFunction.getRewriteFieldName());
    }
    return new IAdvMeasureFiller() {
        private TopNCounter<ByteArray> topNCounter;
        private Iterator<Counter<ByteArray>> topNCounterIterator;
        private int expectRow = 0;

        @SuppressWarnings("unchecked")
        @Override
        public void reload(Object measureValue) {
            this.topNCounter = (TopNCounter<ByteArray>) measureValue;
            this.topNCounterIterator = topNCounter.iterator();
            this.expectRow = 0;
        }

        @Override
        public int getNumOfRows() {
            return topNCounter.size();
        }

        @Override
        public void fillTuple(Tuple tuple, int row) {
            if (expectRow++ != row)
                throw new IllegalStateException();

            Counter<ByteArray> counter = topNCounterIterator.next();
            int offset = counter.getItem().offset();
            for (int i = 0; i < dimensionEncodings.length; i++) {
                String colValue = dimensionEncodings[i].decode(counter.getItem().array(), offset,
                        dimensionEncodings[i].getLengthOfEncoding());
                tuple.setDimensionValue(literalTupleIdx[i], colValue);
                offset += dimensionEncodings[i].getLengthOfEncoding();
            }
            tuple.setMeasureValue(numericTupleIdx, counter.getCount());
        }
    };
}
 
Example 13
Source File: CubeTupleConverter.java    From kylin with Apache License 2.0
public CubeTupleConverter(CubeSegment cubeSeg, Cuboid cuboid, //
        Set<TblColRef> selectedDimensions, Set<FunctionDesc> selectedMetrics, int[] gtColIdx, TupleInfo returnTupleInfo) {
    this.cubeSeg = cubeSeg;
    this.cuboid = cuboid;
    this.gtColIdx = gtColIdx;
    this.tupleInfo = returnTupleInfo;
    this.derivedColFillers = Lists.newArrayList();

    nSelectedDims = selectedDimensions.size();
    tupleIdx = new int[selectedDimensions.size() + selectedMetrics.size()];

    // measure types don't have this many, but the aligned length makes programming easier
    measureTypes = new MeasureType[selectedDimensions.size() + selectedMetrics.size()];

    advMeasureFillers = Lists.newArrayListWithCapacity(1);
    advMeasureIndexInGTValues = Lists.newArrayListWithCapacity(1);
    usedLookupTables = Lists.newArrayList();
    eventTimezone = cubeSeg.getConfig().getStreamingDerivedTimeTimezone();
    autoJustByTimezone = eventTimezone.length() > 0
            && cubeSeg.getCubeDesc().getModel().getRootFactTable().getTableDesc().isStreamingTable();
    if (autoJustByTimezone) {
        logger.debug("Will ajust dimsension for Time Derived Column.");
        timeZoneOffset = TimeZone.getTimeZone(eventTimezone).getRawOffset();
    } else {
        timeZoneOffset = 0;
    }
    ////////////

    int i = 0;

    // pre-calculate dimension index mapping to tuple
    for (TblColRef dim : selectedDimensions) {
        tupleIdx[i] = tupleInfo.hasColumn(dim) ? tupleInfo.getColumnIndex(dim) : -1;
        if (TimeDerivedColumnType.isTimeDerivedColumn(dim.getName())
                && !TimeDerivedColumnType.isTimeDerivedColumnAboveDayLevel(dim.getName())) {
            timestampColumn.add(tupleIdx[i]);
        }
        i++;
    }

    for (FunctionDesc metric : selectedMetrics) {
        if (metric.needRewriteField()) {
            String rewriteFieldName = metric.getRewriteFieldName();
            tupleIdx[i] = tupleInfo.hasField(rewriteFieldName) ? tupleInfo.getFieldIndex(rewriteFieldName) : -1;
        } else {
            // a non-rewrite metric (like SUM, or a dimension playing as a metric) is treated like a dimension column
            TblColRef col = metric.getParameter().getColRefs().get(0);
            tupleIdx[i] = tupleInfo.hasColumn(col) ? tupleInfo.getColumnIndex(col) : -1;
        }

        MeasureType<?> measureType = metric.getMeasureType();
        if (measureType.needAdvancedTupleFilling()) {
            Map<TblColRef, Dictionary<String>> dictionaryMap = buildDictionaryMap(measureType.getColumnsNeedDictionary(metric));
            advMeasureFillers.add(measureType.getAdvancedTupleFiller(metric, returnTupleInfo, dictionaryMap));
            advMeasureIndexInGTValues.add(i);
        } else {
            measureTypes[i] = measureType;
        }

        i++;
    }

    // prepare derived columns and filler
    Map<Array<TblColRef>, List<DeriveInfo>> hostToDerivedInfo = cuboid.getCubeDesc().getHostToDerivedInfo(cuboid.getColumns(), null);
    for (Entry<Array<TblColRef>, List<DeriveInfo>> entry : hostToDerivedInfo.entrySet()) {
        TblColRef[] hostCols = entry.getKey().data;
        for (DeriveInfo deriveInfo : entry.getValue()) {
            IDerivedColumnFiller filler = newDerivedColumnFiller(hostCols, deriveInfo);
            if (filler != null) {
                derivedColFillers.add(filler);
            }
        }
    }

    rowKeyDesc = cubeSeg.getCubeDesc().getRowkey();
}
 
Example 14
Source File: StreamingTupleConverter.java    From kylin with Apache License 2.0
public StreamingTupleConverter(ResponseResultSchema schema, TupleInfo returnTupleInfo) {
    this.tupleInfo = returnTupleInfo;
    dimCnt = schema.getDimensionCount();
    metricsCnt = schema.getMetricsCount();
    dimTupleIdx = new int[dimCnt];
    metricsTupleIdx = new int[metricsCnt];

    // measure types don't have this many, but the aligned length makes programming easier
    measureTypes = new MeasureType[metricsCnt];

    advMeasureFillers = Lists.newArrayListWithCapacity(1);
    advMeasureIndexInGTValues = Lists.newArrayListWithCapacity(1);

    int idx = 0;
    // pre-calculate dimension index mapping to tuple
    for (TblColRef dim : schema.getDimensions()) {
        dimTupleIdx[idx] = tupleInfo.hasColumn(dim) ? tupleInfo.getColumnIndex(dim) : -1;
        if (dim.getType().isDateTimeFamily() && TimeDerivedColumnType.isTimeDerivedColumn(dim.getName()))
            timestampColumn.add(dimTupleIdx[idx]);
        idx++;
    }

    idx = 0;
    for (FunctionDesc metric : schema.getMetrics()) {
        if (metric.needRewrite()) {
            String rewriteFieldName = metric.getRewriteFieldName();
            metricsTupleIdx[idx] = tupleInfo.hasField(rewriteFieldName) ? tupleInfo.getFieldIndex(rewriteFieldName) : -1;
        } else { // a non-rewrite metric (like SUM, or a dimension playing as a metric) is treated like a dimension column
            TblColRef col = metric.getParameter().getColRefs().get(0);
            metricsTupleIdx[idx] = tupleInfo.hasColumn(col) ? tupleInfo.getColumnIndex(col) : -1;
        }

        MeasureType<?> measureType = metric.getMeasureType();
        if (measureType.needAdvancedTupleFilling()) {
            advMeasureFillers.add(measureType.getAdvancedTupleFiller(metric, returnTupleInfo, null));
            advMeasureIndexInGTValues.add(idx);
        } else {
            measureTypes[idx] = measureType;
        }
        idx++;
    }
}