Java Code Examples for org.apache.kylin.metadata.model.FunctionDesc

The following examples show how to use org.apache.kylin.metadata.model.FunctionDesc. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: Kylin   Source File: OLAPAggregateRel.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Implements the rewrite phase for this aggregate relational node: after
 * visiting the child, the aggregate calls are swapped for cube-backed
 * equivalents where the cube pre-computes the measure.
 *
 * NOTE(review): relies on this.context.aggregations being index-aligned with
 * this.aggCalls — presumably established earlier in the OLAP planning phases;
 * confirm against the class's implementOLAP/analyze step.
 */
@Override
public void implementRewrite(RewriteImplementor implementor) {
    implementor.visitChild(this, getChild());

    // only rewrite the first aggregation
    if (!this.afterAggregate && RewriteImplementor.needRewrite(this.context)) {
        // rewrite the aggCalls
        this.rewriteAggCalls = new ArrayList<AggregateCall>(aggCalls.size());
        for (int i = 0; i < this.aggCalls.size(); i++) {
            AggregateCall aggCall = this.aggCalls.get(i);
            // the cube-side function description paired with this agg call
            FunctionDesc cubeFunc = this.context.aggregations.get(i);
            if (cubeFunc.needRewrite()) {
                // replace e.g. COUNT_DISTINCT with the measure's rewrite form
                aggCall = rewriteAggregateCall(aggCall, cubeFunc);
            }
            this.rewriteAggCalls.add(aggCall);
        }
    }

    // rebuild rowType & columnRowType
    // (must happen after the agg calls were possibly rewritten, since the
    // derived row type reflects the rewritten calls)
    this.rowType = this.deriveRowType();
    this.columnRowType = this.buildColumnRowType();

}
 
Example 2
Source Project: kylin   Source File: CuboidToGridTableMapping.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Returns the aggregation function expressions (e.g. "SUM", "COUNT") for the
 * given metrics, ordered by each metric's index in this mapping.
 *
 * @param metrics metric functions to render; may arrive in arbitrary order
 *                because callers derive them from an ImmutableBitSet
 * @return expressions aligned with the metrics' natural (index) order
 */
public String[] makeAggrFuncs(Collection<? extends FunctionDesc> metrics) {

    //metrics are represented in ImmutableBitSet, which loses order information
    //sort the aggrFuns to align with metrics natural order
    List<FunctionDesc> metricList = Lists.newArrayList(metrics);
    Collections.sort(metricList, new Comparator<FunctionDesc>() {
        @Override
        public int compare(FunctionDesc o1, FunctionDesc o2) {
            // Integer.compare avoids the overflow that "a - b" suffers for
            // extreme index values
            return Integer.compare(getIndexOf(o1), getIndexOf(o2));
        }
    });

    String[] result = new String[metricList.size()];
    int i = 0;
    for (FunctionDesc metric : metricList) {
        result[i++] = metric.getExpression();
    }
    return result;
}
 
Example 3
Source Project: Kylin   Source File: AdjustForWeaklyMatchedRealization.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * For every aggregation in the OLAP context that the realization cannot serve,
 * downgrades the metric to a dimension: its column is moved from the metrics
 * set to the group-by set and its rewrite field is dropped.
 */
private static void convertAggregationToDimension(OLAPContext olapContext, Collection<FunctionDesc> availableAggregations, String factTableName) {
    for (FunctionDesc aggregation : olapContext.aggregations) {
        if (availableAggregations.contains(aggregation)) {
            continue; // the realization can answer this one directly
        }
        // try to convert the metric to dimension to see if it works
        TblColRef metricCol = aggregation.selectTblColRef(olapContext.metricsColumns, factTableName);
        aggregation.setDimensionAsMetric(true);
        olapContext.rewriteFields.remove(aggregation.getRewriteFieldName());
        if (metricCol != null) {
            olapContext.metricsColumns.remove(metricCol);
            olapContext.groupByColumns.add(metricCol);
        }
        logger.info("Adjust OLAPContext for " + aggregation);
    }
}
 
Example 4
Source Project: kylin   Source File: SequentialCubeTupleIterator.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a tuple iterator that spans multiple cube segments, choosing between
 * a merge-sorted iterator (so a push-down limit stays correct across segments)
 * and simple concatenation.
 *
 * @param scanners one scanner per cube segment to read
 * @param rtGroups runtime group-by columns added on top of selectedDimensions
 */
public SequentialCubeTupleIterator(List<CubeSegmentScanner> scanners, Cuboid cuboid,
        Set<TblColRef> selectedDimensions, List<TblColRef> rtGroups, Set<TblColRef> groups, //
        Set<FunctionDesc> selectedMetrics, TupleInfo returnTupleInfo, StorageContext context, SQLDigest sqlDigest) {
    this.context = context;
    this.scanners = scanners;

    // effective dimension set = explicitly selected dims + runtime groups
    Set<TblColRef> selectedDims = Sets.newHashSet(selectedDimensions);
    selectedDims.addAll(rtGroups);

    // one per-segment iterator per scanner, all sharing the same tuple layout
    segmentCubeTupleIterators = Lists.newArrayList();
    for (CubeSegmentScanner scanner : scanners) {
        segmentCubeTupleIterators.add(new SegmentCubeTupleIterator(scanner, cuboid, selectedDims, selectedMetrics, returnTupleInfo, context));
    }

    if (context.mergeSortPartitionResults() && !sqlDigest.isRawQuery) {
        //query with limit
        logger.info("Using SortedIteratorMergerWithLimit to merge segment results");
        // double cast erases the element type; safe only because each element
        // iterator yields ITuple — NOTE(review): unchecked by the compiler
        Iterator<Iterator<ITuple>> transformed = (Iterator<Iterator<ITuple>>) (Iterator<?>) segmentCubeTupleIterators.iterator();
        tupleIterator = new SortedIteratorMergerWithLimit<ITuple>(transformed, context.getFinalPushDownLimit(), getTupleDimensionComparator(cuboid, groups, returnTupleInfo)).getIterator();
    } else {
        //normal case
        logger.info("Using Iterators.concat to merge segment results");
        tupleIterator = Iterators.concat(segmentCubeTupleIterators.iterator());
    }
}
 
Example 5
Source Project: kylin   Source File: ExtendedColumnMeasureType.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Rewrites the SQL digest for extended-column measures: when a query groups by
 * an extended column, the group-by is redirected to the host column(s) and the
 * extended column itself becomes a metric served by this measure.
 */
@Override
public void adjustSqlDigest(List<MeasureDesc> measureDescs, SQLDigest sqlDigest) {
    for (MeasureDesc measure : measureDescs) {
        if (sqlDigest.involvedMeasure.contains(measure)) {
            FunctionDesc func = measure.getFunction();
            List<TblColRef> hostCols = getExtendedColumnHosts(func);
            TblColRef extendedCol = getExtendedColumn(func);

            if (sqlDigest.groupbyColumns.contains(extendedCol)) {
                // serve the extended column from this measure instead of grouping by it
                sqlDigest.aggregations.add(func);
                sqlDigest.groupbyColumns.remove(extendedCol);
                sqlDigest.groupbyColumns.addAll(hostCols);
                sqlDigest.metricColumns.add(extendedCol);
            }
        }
    }
}
 
Example 6
Source Project: kylin-on-parquet-v2   Source File: CubeDescCreator.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds a SUM measure over the given column, named {@code <column>_SUM}.
 * A HDOUBLE input is widened to HDECIMAL for the return type; every other
 * data type is kept as-is.
 */
public static MeasureDesc getMeasureSum(String column, String dataType) {
    ParameterDesc param = new ParameterDesc();
    param.setValue(column);
    param.setType(FunctionDesc.PARAMETER_TYPE_COLUMN);

    // doubles sum into decimals to avoid precision loss; other types pass through
    String returnType = dataType.equals(HiveTableCreator.HiveTypeEnum.HDOUBLE.toString())
            ? HiveTableCreator.HiveTypeEnum.HDECIMAL.toString()
            : dataType;

    FunctionDesc sumFunc = new FunctionDesc();
    sumFunc.setExpression(FunctionDesc.FUNC_SUM);
    sumFunc.setParameter(param);
    sumFunc.setReturnType(returnType);

    MeasureDesc measure = new MeasureDesc();
    measure.setName(column + "_SUM");
    measure.setFunction(sumFunc);
    return measure;
}
 
Example 7
Source Project: Kylin   Source File: OLAPAggregateRel.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds the column row type for this aggregate node: group-by columns first,
 * then one column per aggregation (a rewrite column for cube-served measures,
 * otherwise the input column referenced by the agg call's first argument).
 *
 * NOTE(review): aggregations and rewriteAggCalls are assumed index-aligned;
 * an aggregation with no arguments (e.g. COUNT(*)) contributes a null column.
 */
private ColumnRowType buildColumnRowType() {
    buildGroups();
    buildAggregations();

    ColumnRowType inputColumnRowType = ((OLAPRel) getChild()).getColumnRowType();
    List<TblColRef> columns = new ArrayList<TblColRef>(this.rowType.getFieldCount());
    columns.addAll(this.groups);

    for (int i = 0; i < this.aggregations.size(); i++) {
        FunctionDesc aggFunc = this.aggregations.get(i);
        TblColRef aggCol = null;
        if (aggFunc.needRewrite()) {
            // cube pre-computes this measure; point at its rewrite column
            aggCol = buildRewriteColumn(aggFunc);
        } else {
            // plain aggregation: resolve the first argument to an input column
            AggregateCall aggCall = this.rewriteAggCalls.get(i);
            if (!aggCall.getArgList().isEmpty()) {
                int index = aggCall.getArgList().get(0);
                aggCol = inputColumnRowType.getColumnByIndex(index);
            }
        }
        columns.add(aggCol);
    }
    return new ColumnRowType(columns);
}
 
Example 8
Source Project: kylin   Source File: BasicMeasureType.java    License: Apache License 2.0 6 votes vote down vote up
@Override
public void validate(FunctionDesc functionDesc) throws IllegalArgumentException {
    DataType rtype = dataType;

    if (funcName.equals(FunctionDesc.FUNC_SUM)) {
        if (rtype.isNumberFamily() == false) {
            throw new IllegalArgumentException("Return type for function " + funcName + " must be one of " + DataType.NUMBER_FAMILY);
        }
    } else if (funcName.equals(FunctionDesc.FUNC_COUNT)) {
        if (rtype.isIntegerFamily() == false) {
            throw new IllegalArgumentException("Return type for function " + funcName + " must be one of " + DataType.INTEGER_FAMILY);
        }
    } else if (funcName.equals(FunctionDesc.FUNC_MAX) || funcName.equals(FunctionDesc.FUNC_MIN)) {
        if (rtype.isNumberFamily() == false) {
            throw new IllegalArgumentException("Return type for function " + funcName + " must be one of " + DataType.NUMBER_FAMILY);
        }
    } else {
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        if (config.isQueryIgnoreUnknownFunction() == false)
            throw new IllegalArgumentException("Unrecognized function: [" + funcName + "]");
    }
}
 
Example 9
Source Project: kylin   Source File: CubeDesc.java    License: Apache License 2.0 6 votes vote down vote up
@SuppressWarnings("deprecation")
private void initMeasureColumns() {
    if (measures == null || measures.isEmpty()) {
        return;
    }

    for (MeasureDesc m : measures) {
        m.setName(m.getName().toUpperCase(Locale.ROOT));

        if (m.getDependentMeasureRef() != null) {
            m.setDependentMeasureRef(m.getDependentMeasureRef().toUpperCase(Locale.ROOT));
        }

        FunctionDesc func = m.getFunction();
        func.init(model);
        allColumns.addAll(func.getParameter().getColRefs());

        if (ExtendedColumnMeasureType.FUNC_EXTENDED_COLUMN.equalsIgnoreCase(m.getFunction().getExpression())) {
            FunctionDesc functionDesc = m.getFunction();

            List<TblColRef> hosts = ExtendedColumnMeasureType.getExtendedColumnHosts(functionDesc);
            TblColRef extendedColumn = ExtendedColumnMeasureType.getExtendedColumn(functionDesc);
            initExtendedColumnMap(hosts.toArray(new TblColRef[hosts.size()]), extendedColumn);
        }
    }
}
 
Example 10
Source Project: kylin   Source File: StreamingCubeDataSearcherPerfTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Runs one timed inverted-index style search against the streaming cube:
 * filters a two-hour MINUTE_START window AND'ed with a single-item equality
 * filter, prints each result record, and reports the elapsed milliseconds.
 *
 * @param time iteration number, used only for console labelling
 * @throws IOException if the underlying searcher fails
 */
private void iiSearch(int time) throws IOException {
    System.out.println("start " + time + " invertIndex search");
    Stopwatch sw = Stopwatch.createUnstarted();
    sw.start();
    Set<TblColRef> dimensions = testHelper.simulateDimensions("STREAMING_V2_TABLE.MINUTE_START",
            "STREAMING_V2_TABLE.ITM");
    Set<TblColRef> groups = testHelper.simulateDimensions();
    Set<FunctionDesc> metrics = Sets.newHashSet(testHelper.simulateCountMetric());
    long startTime = DateFormat.stringToMillis("2018-07-30 07:00:00");
    long endTime = DateFormat.stringToMillis("2018-07-30 09:00:00");
    // time-range AND item-equality filter narrows the scan to one item, 2 hours
    TupleFilter timeFilter = testHelper.buildTimeRangeFilter("STREAMING_V2_TABLE.MINUTE_START",
            String.valueOf(startTime), String.valueOf(endTime));
    TupleFilter itemFilter = testHelper.buildEQFilter("STREAMING_V2_TABLE.ITM", "ITM0000000000");
    TupleFilter filter = testHelper.buildAndFilter(timeFilter, itemFilter);
    StreamingSearchContext searchRequest = new StreamingSearchContext(parsedStreamingCubeInfo.cubeDesc, dimensions,
            groups, metrics, filter, null);
    IStreamingSearchResult searchResult = searcher.doSearch(searchRequest, 0L, true);
    for (Record record : searchResult) {
        System.out.println(record);
    }
    sw.stop();
    long takeTime = sw.elapsed(MILLISECONDS);
    System.out.println(time + " search finished, took:" + takeTime);
}
 
Example 11
/**
 * Verifies that a COUNT aggregation grouped over a 4-minute MINUTE_START
 * window only returns records whose minute-start dimension falls inside
 * the [startTime, endTime) filter range.
 */
@Test
public void testOneValueAggregation() {
    genEvents(80000);
    StreamingCubeDataSearcher searcher = streamingSegmentManager.getSearcher();
    String startTimeStr = "2018-07-30 20:00:00";
    long startTime = DateFormat.stringToMillis(startTimeStr);
    String endTimeStr = "2018-07-30 20:04:00";
    long endTime = DateFormat.stringToMillis(endTimeStr);
    // half-open window: MINUTE_START >= start AND MINUTE_START < end
    CompareTupleFilter filter1 = testHelper.buildCompareFilter("STREAMING_V2_TABLE.MINUTE_START",
        FilterOperatorEnum.GTE, startTimeStr);
    CompareTupleFilter filter2 = testHelper.buildCompareFilter("STREAMING_V2_TABLE.MINUTE_START",
        FilterOperatorEnum.LT, endTimeStr);
    TupleFilter filter = testHelper.buildAndFilter(filter1, filter2);
    Set<FunctionDesc> metrics = Sets.newHashSet(testHelper.simulateCountMetric());
    Set<TblColRef> dimensions = testHelper.simulateDimensions("STREAMING_V2_TABLE.MINUTE_START");
    Set<TblColRef> groups = Sets.newHashSet();
    StreamingSearchContext searchRequest = new StreamingSearchContext(cubeDesc, dimensions, groups, metrics, filter,
        null);
    IStreamingSearchResult segmentResults1 = searcher.doSearch(searchRequest, -1, true);
    for (Record record : segmentResults1) {
        // dimension[0] is the MINUTE_START value rendered as a long string
        long minStart = Long.valueOf(record.getDimensions()[0]);
        assertTrue(startTime <= minStart && minStart < endTime);
        System.out.println(record);
    }
}
 
Example 12
Source Project: kylin-on-parquet-v2   Source File: MeasureTypeFactory.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Registers the user-defined aggregate functions that a measure-type factory
 * exposes for Calcite rewriting. Built-in COUNT_DISTINCT is skipped, and a
 * duplicate UDAF name across factories is a hard configuration error.
 *
 * @throws IllegalStateException if two factories declare the same UDAF name
 */
private static void registerUDAF(MeasureTypeFactory<?> factory) {
    MeasureType<?> type = factory.createMeasureType(factory.getAggrFunctionName(),
            DataType.getType(factory.getAggrDataTypeName()));
    Map<String, Class<?>> udafs = type.getRewriteCalciteAggrFunctions();
    if (!type.needRewrite() || udafs == null)
        return;

    // iterate entries rather than keySet: the key is normalized to upper case
    // below, so a get() with the normalized key would return null for any
    // factory that declared a non-uppercase UDAF name
    for (Map.Entry<String, Class<?>> entry : udafs.entrySet()) {
        String udaf = entry.getKey().toUpperCase(Locale.ROOT);
        if (udaf.equals(FunctionDesc.FUNC_COUNT_DISTINCT))
            continue; // skip built-in function

        if (udafFactories.containsKey(udaf))
            throw new IllegalStateException(
                    "UDAF '" + udaf + "' was dup declared by " + udafFactories.get(udaf) + " and " + factory);

        udafFactories.put(udaf, factory);
        udafMap.put(udaf, entry.getValue());
    }
}
 
Example 13
/**
 * Verifies that a single-dimension (SITE) query hits the additional cuboid
 * (id 1) rather than the base cuboid, and that the memory store returns the
 * expected 10 records of one dimension plus one metric each.
 */
@Test
public void testSearchSpecificCuboid() throws Exception {
    // a query profile must be installed before searching
    StreamingQueryProfile profile = new StreamingQueryProfile("test-query-id", System.currentTimeMillis());
    StreamingQueryProfile.set(profile);

    setBuildAdditionalCuboids();
    int eventCnt = 50000;
    prepareDataToMemoryStore(eventCnt);
    Set<TblColRef> dimensions = simulateColumns("SITE");
    Set<TblColRef> groups = Sets.newHashSet();
    Set<FunctionDesc> metrics = simulateMetrics();
    StreamingSearchContext searchRequest = new StreamingSearchContext(cubeDesc, dimensions, groups, metrics,
            null, null);
    // cuboid 1 is the additional (SITE-only) cuboid built above
    assertEquals(1L, searchRequest.getHitCuboid());
    ResultCollector resultCollector = new SingleThreadResultCollector();
    memoryStore.search(searchRequest, resultCollector);
    int returnRecordCnt = 0;
    int returnColNum = 0;
    for (Record record : resultCollector) {
        returnRecordCnt++;
        returnColNum = record.getDimensions().length + record.getMetrics().length;
    }
    // 10 distinct SITE values; each record = 1 dimension + 1 metric
    assertEquals(10, returnRecordCnt);
    assertEquals(2, returnColNum);
}
 
Example 14
Source Project: Kylin   Source File: CubeStorageEngine.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Splits the SQL digest into output collections: every aggregation that is a
 * real metric (not a dimension-as-metric fallback) goes into {@code metrics},
 * and every referenced column that is not a measure column goes into
 * {@code dimensions}. Both output collections are mutated in place.
 */
private void buildDimensionsAndMetrics(Collection<TblColRef> dimensions, Collection<FunctionDesc> metrics, SQLDigest sqlDigest) {
    for (FunctionDesc aggregation : sqlDigest.aggregations) {
        if (!aggregation.isDimensionAsMetric()) {
            metrics.add(aggregation);
        }
    }

    for (TblColRef col : sqlDigest.allColumns) {
        // anything not consumed as a measure column counts as a dimension
        if (!sqlDigest.metricColumns.contains(col)) {
            dimensions.add(col);
        }
    }
}
 
Example 15
Source Project: kylin   Source File: SegmentMemoryStoreTest.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Verifies that a single-dimension (SITE) query hits the additional cuboid
 * (id 1) and that the segment memory store returns 10 records, each made of
 * one dimension and one metric.
 */
@Test
public void testSearchSpecificCuboid() throws Exception {
    // a query profile must be installed before searching
    StreamingQueryProfile profile = new StreamingQueryProfile("test-query-id", System.currentTimeMillis());
    StreamingQueryProfile.set(profile);

    setBuildAdditionalCuboids();
    int eventCnt = 50000;
    prepareDataToMemoryStore(eventCnt);
    Set<TblColRef> dimensions = simulateColumns("SITE");
    Set<TblColRef> groups = Sets.newHashSet();
    Set<FunctionDesc> metrics = simulateMetrics();
    StreamingSearchContext searchRequest = new StreamingSearchContext(cubeDesc, dimensions, groups, metrics,
            null, null);
    // cuboid 1 is the additional (SITE-only) cuboid built above
    assertEquals(1L, searchRequest.getHitCuboid());
    ResultCollector resultCollector = new SingleThreadResultCollector();
    memoryStore.search(searchRequest, resultCollector);
    int returnRecordCnt = 0;
    int returnColNum = 0;
    for (Record record : resultCollector) {
        returnRecordCnt++;
        returnColNum = record.getDimensions().length + record.getMetrics().length;
    }
    // 10 distinct SITE values; each record = 1 dimension + 1 metric
    assertEquals(10, returnRecordCnt);
    assertEquals(2, returnColNum);
}
 
Example 16
Source Project: Kylin   Source File: EndpointTupleIterator.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds the tuple layout returned by this endpoint iterator: all dimension
 * columns first (in declared order), followed by one field per measure.
 * Measure fields carry no backing column, only a rewrite name and SQL type.
 */
private TupleInfo buildTupleInfo() {
    TupleInfo info = new TupleInfo();
    int index = 0;

    // dimension fields, in declared column order
    for (int i = 0; i < columns.size(); i++) {
        TblColRef column = columns.get(i);
        info.setField(columnNames.get(i), column, column.getType().getName(), index++);
    }

    // measure fields follow the dimensions; null column ref by design
    for (FunctionDesc measure : measures) {
        info.setField(measure.getRewriteFieldName(), null, measure.getSQLType(), index++);
    }

    return info;
}
 
Example 17
Source Project: kylin   Source File: TopNMeasureType.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Decides whether a SUM (or COUNT) aggregation can be answered by a TopN
 * measure: a TopN without a numeric column matches COUNT, while a TopN with
 * a numeric column matches SUM over that same column.
 *
 * @param topN candidate TopN measure function
 * @param sum  the query's aggregation; may be null
 */
private boolean isTopNCompatibleSum(FunctionDesc topN, FunctionDesc sum) {
    if (sum == null)
        return false;

    if (!isTopN(topN))
        return false;

    TblColRef topnNumCol = getTopNNumericColumn(topN);

    // a TopN without a numeric column is count-based, so it serves COUNT
    if (topnNumCol == null)
        return sum.isCount();

    if (!sum.isSum())
        return false;

    // guard against a malformed SUM with no column parameter
    if (sum.getParameter() == null || sum.getParameter().getColRefs() == null
            || sum.getParameter().getColRefs().isEmpty())
        return false;

    // SUM's column must be exactly the TopN's numeric column
    TblColRef sumCol = sum.getParameter().getColRefs().get(0);
    return sumCol.equals(topnNumCol);
}
 
Example 18
Source Project: kylin   Source File: QueryGenerator.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Renders a SQL select-list fragment for at most one measure: the first
 * SUM/MAX/MIN measure becomes "EXPR(col)", or the first COUNT_DISTINCT
 * becomes "COUNT(DISTINCT col)". The loop stops after the first qualifying
 * measure, so the result contains zero or one line.
 */
public static String createMeasureStatement(List<MeasureDesc> measureList) {
    StringBuilder sql = new StringBuilder();

    for (MeasureDesc measure : measureList) {
        FunctionDesc func = measure.getFunction();
        if (func.isSum() || func.isMax() || func.isMin()) {
            sql.append("," + func.getExpression() + "(" + func.getParameter().getValue() + ")\n");
            break; // only the first qualifying measure is emitted
        }
        if (func.isCountDistinct()) {
            sql.append(",COUNT" + "(DISTINCT " + func.getParameter().getValue() + ")\n");
            break; // only the first qualifying measure is emitted
        }
    }

    return sql.toString();
}
 
Example 19
/**
 * Returns the aggregation function expressions for the given metrics,
 * ordered by each metric's index in this mapping.
 *
 * @param metrics metric functions to render; may arrive in arbitrary order
 *                because callers derive them from an ImmutableBitSet
 * @return expressions aligned with the metrics' natural (index) order
 */
public String[] makeAggrFuncs(Collection<FunctionDesc> metrics) {

    //metrics are represented in ImmutableBitSet, which loses order information
    //sort the aggrFuns to align with metrics natural order
    List<FunctionDesc> metricList = Lists.newArrayList(metrics);
    Collections.sort(metricList, new Comparator<FunctionDesc>() {
        @Override
        public int compare(FunctionDesc o1, FunctionDesc o2) {
            // Integer.compare avoids the overflow that "a - b" suffers for
            // extreme index values
            return Integer.compare(getIndexOf(o1), getIndexOf(o2));
        }
    });

    String[] result = new String[metricList.size()];
    int i = 0;
    for (FunctionDesc metric : metricList) {
        result[i++] = metric.getExpression();
    }
    return result;
}
 
Example 20
Source Project: kylin-on-parquet-v2   Source File: RawMeasureType.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Capability check for the raw measure: applies only to raw queries whose
 * columns are covered by the raw column. On a match, the measure's function
 * is removed from the unmatched set (mutating the argument) and a 0.9 cost
 * multiplier is returned to slightly favor this realization.
 *
 * @return the influence on cost, or null when this measure cannot help
 */
public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, final MeasureDesc measureDesc) {
    //is raw query
    if (!digest.isRawQuery)
        return null;

    // the query's columns must all be the raw column (or none at all)
    TblColRef rawColumn = getRawColumn(measureDesc.getFunction());
    if (!digest.allColumns.isEmpty() && !digest.allColumns.contains(rawColumn)) {
        return null;
    }

    // side effect: mark this measure's aggregation as matched
    unmatchedAggregations.remove(measureDesc.getFunction());

    //contain one raw measure : cost * 0.9
    return new CapabilityResult.CapabilityInfluence() {
        @Override
        public double suggestCostMultiplier() {
            return 0.9;
        }

        @Override
        public MeasureDesc getInvolvedMeasure() {
            return measureDesc;
        }
    };
}
 
Example 21
/**
 * Value-holder constructor for a grid-table storage query: captures the target
 * cuboid, dimension/group/metric sets, dynamic groups and functions, filters,
 * and storage context.
 *
 * NOTE(review): references are stored as-is (no defensive copies), so callers
 * must not mutate the passed collections after construction.
 */
public GTCubeStorageQueryRequest(Cuboid cuboid, Set<TblColRef> dimensions, //
        Set<TblColRef> groups, List<TblColRef> dynGroups, List<TupleExpression> dynGroupExprs, //
        Set<TblColRef> filterCols, Set<FunctionDesc> metrics, List<DynamicFunctionDesc> dynFuncs, //
        TupleFilter filter, TupleFilter havingFilter, StorageContext context) {
    this.cuboid = cuboid;
    this.dimensions = dimensions;
    this.groups = groups;
    this.dynGroups = dynGroups;
    this.dynGroupExprs = dynGroupExprs;
    this.filterCols = filterCols;
    this.metrics = metrics;
    this.dynFuncs = dynFuncs;
    this.filter = filter;
    this.havingFilter = havingFilter;
    this.context = context;
}
 
Example 22
Source Project: kylin   Source File: StorageMockUtils.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds the two mock aggregations used by storage tests:
 * SUM(PRICE) returning decimal(19,4) and COUNT_DISTINCT(SELLER_ID)
 * returning an HLL counter with precision 10.
 */
public List<FunctionDesc> buildAggregations() {
    TblColRef price = model.findColumn("DEFAULT.TEST_KYLIN_FACT.PRICE");
    TblColRef seller = model.findColumn("DEFAULT.TEST_KYLIN_FACT.SELLER_ID");

    List<FunctionDesc> functions = new ArrayList<FunctionDesc>();
    functions.add(FunctionDesc.newInstance("SUM", //
            ParameterDesc.newInstance(price), "decimal(19,4)"));
    functions.add(FunctionDesc.newInstance("COUNT_DISTINCT", //
            ParameterDesc.newInstance(seller), "hllc(10)"));
    return functions;
}
 
Example 23
/**
 * Builds a tuple iterator over one cube segment: resolves the selected
 * dimensions and metrics to grid-table column indexes, wires up the GT value
 * iterator, and creates the converter that maps GT rows to tuples.
 */
public SegmentCubeTupleIterator(CubeSegmentScanner scanner, Cuboid cuboid, Set<TblColRef> selectedDimensions, //
        Set<FunctionDesc> selectedMetrics, TupleInfo returnTupleInfo, StorageContext context) {
    this.scanner = scanner;
    this.cuboid = cuboid;
    this.selectedDimensions = selectedDimensions;
    this.selectedMetrics = selectedMetrics;
    this.tupleInfo = returnTupleInfo;
    this.tuple = new Tuple(returnTupleInfo);
    this.context = context;

    // map logical dims/metrics onto grid-table column positions
    CuboidToGridTableMapping mapping = context.getMapping();
    int[] gtDimsIdx = mapping.getDimIndexes(selectedDimensions);
    int[] gtMetricsIdx = mapping.getMetricsIndexes(selectedMetrics);
    // gtColIdx = gtDimsIdx + gtMetricsIdx
    int[] gtColIdx = new int[gtDimsIdx.length + gtMetricsIdx.length];
    System.arraycopy(gtDimsIdx, 0, gtColIdx, 0, gtDimsIdx.length);
    System.arraycopy(gtMetricsIdx, 0, gtColIdx, gtDimsIdx.length, gtMetricsIdx.length);

    this.gtValues = getGTValuesIterator(scanner.iterator(), scanner.getScanRequest(), gtDimsIdx, gtMetricsIdx);
    // converter translates raw GT values into the returnTupleInfo layout
    this.cubeTupleConverter = ((GTCubeStorageQueryBase) context.getStorageQuery()).newCubeTupleConverter(
            scanner.cubeSeg, cuboid, selectedDimensions, selectedMetrics, gtColIdx, tupleInfo);
}
 
Example 24
/**
 * Test fixture: loads the seller test cube, collects its measure functions as
 * metrics, and appends one mock FunctionDesc whose private measureType field
 * is replaced via reflection with a MockUpMeasureType.
 */
@Before
public void setUp() throws Exception {
    this.createTestMetadata();

    cube = getTestKylinCubeWithSeller();
    cubeDesc = cube.getDescriptor();

    dimensions = Sets.newHashSet();
    metrics = Lists.newArrayList();
    for (MeasureDesc measureDesc : cubeDesc.getMeasures()) {
        Collections.addAll(metrics, measureDesc.getFunction());
    }

    // inject a mock measure type; the field is private, hence reflection
    FunctionDesc mockUpFuncDesc = new FunctionDesc();
    Field field = FunctionDesc.class.getDeclaredField("measureType");
    field.setAccessible(true);
    field.set(mockUpFuncDesc, new MockUpMeasureType());
    metrics.add(mockUpFuncDesc);
}
 
Example 25
Source Project: kylin-on-parquet-v2   Source File: TestHelper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Builds the mock COUNT(1) metric with a bigint return type used by the
 * streaming test helpers.
 */
public FunctionDesc simulateCountMetric() {
    FunctionDesc countFunc = new FunctionDesc();
    countFunc.setExpression("COUNT");
    countFunc.setParameter(ParameterDesc.newInstance("1"));
    countFunc.setReturnType("bigint");
    return countFunc;
}
 
Example 26
Source Project: kylin   Source File: BitmapMapMeasureType.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the columns that require a dictionary for this measure: the
 * function's first parameter column when a dictionary is needed, otherwise
 * an empty list.
 */
@Override
public List<TblColRef> getColumnsNeedDictionary(FunctionDesc functionDesc) {
    if (!needDictionaryColumn(functionDesc)) {
        return Collections.emptyList();
    }
    TblColRef dictColumn = functionDesc.getParameter().getColRefs().get(0);
    return Collections.singletonList(dictColumn);
}
 
Example 27
Source Project: kylin   Source File: ITStorageTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Smoke test: runs one aggregate query with a single filter through the
 * storage engine and only asserts that it completes with a non-negative
 * result count.
 */
@Test
public void test01() {
    List<TblColRef> groupCols = mockup.buildGroups();
    List<FunctionDesc> aggrFuncs = mockup.buildAggregations();
    TupleFilter cond = mockup.buildFilter1(groupCols.get(0));

    int resultCount = search(groupCols, aggrFuncs, cond, context);
    assertTrue(resultCount >= 0);
}
 
Example 28
Source Project: kylin   Source File: FragmentFilesMergerTest.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Merges four simulated fragments and verifies both the merged metadata
 * (deduplicated row count vs. original row count) and a grouped COUNT query
 * over the merged fragment data.
 *
 * NOTE(review): the expected constants (160000 merged rows, 2 result rows,
 * 100000 events per minute) are tied to StreamingDataSimulator's cardinality
 * map and event rate — change those together.
 */
@Test
public void testMerge2() throws Exception {
    int fragmentNum = 4;
    int eventCntPerMin = 100000;
    StreamingDataSimulator simulator = new StreamingDataSimulator(getTestCardinalityMap(), eventCntPerMin);
    List<DataSegmentFragment> fragments = createFragmentFiles(5, fragmentNum, simulator);
    int originRowCnt = fragmentNum * 50000;
    FragmentsMergeResult mergeResult = fragmentFilesMerger.merge(fragments);
    File mergedFragmentMetaFile = mergeResult.getMergedFragmentMetaFile();
    File mergedFragmentDataFile = mergeResult.getMergedFragmentDataFile();
    FragmentMetaInfo fragmentMetaInfo = JsonUtil.readValue(mergedFragmentMetaFile, FragmentMetaInfo.class);
    // merge aggregates duplicate keys: 160000 rows remain out of originRowCnt
    assertEquals(160000, fragmentMetaInfo.getNumberOfRows());
    assertEquals(originRowCnt, fragmentMetaInfo.getOriginNumOfRows());

    // query the merged fragment: group by MINUTE_START, COUNT metric
    FragmentData fragmentData = new FragmentData(fragmentMetaInfo, mergedFragmentDataFile);
    Set<TblColRef> dims = testHelper.simulateDimensions("STREAMING_V2_TABLE.MINUTE_START");
    TupleFilter siteFilter = null;
    Set<TblColRef> groups = testHelper.simulateDimensions("STREAMING_V2_TABLE.MINUTE_START");
    Set<FunctionDesc> metrics = Sets.newHashSet(testHelper.simulateCountMetric());

    StreamingQueryProfile.set(new StreamingQueryProfile("test-query-id", System.currentTimeMillis()));
    DataSegmentFragment fragment = new DataSegmentFragment(baseStorePath, cubeName, segmentName, new FragmentId(0));
    FragmentFileSearcher fragmentFileSearcher = new FragmentFileSearcher(fragment, fragmentData);
    StreamingSearchContext searchRequest = new StreamingSearchContext(parsedStreamingCubeInfo.cubeDesc, dims,
            groups, metrics, siteFilter, null);
    ResultCollector resultCollector = new SingleThreadResultCollector();
    fragmentFileSearcher.search(searchRequest, resultCollector);
    int rowCnt = 0;
    int totalOriginCnt = 0;
    for (Record record : resultCollector) {
        rowCnt++;
        long everyMinuteCnt = (Long) record.getMetrics()[0];
        // each minute bucket carries exactly the simulated per-minute count
        assertEquals(eventCntPerMin, (int) everyMinuteCnt);
        System.out.println(record);
        totalOriginCnt += everyMinuteCnt;
    }
    // two minute buckets, which together account for every original row
    assertEquals(2, rowCnt);
    assertEquals(originRowCnt, totalOriginCnt);
}
 
Example 29
Source Project: Kylin   Source File: BaseCuboidMapper.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Extracts the raw value bytes for one measure from the split flat-table row.
 * A constant parameter becomes its literal bytes; a column-backed parameter
 * concatenates the referenced column values in order. COUNT and holistic
 * count-distinct override the value with the constant ONE, and a null-marker
 * result is normalized to Java null.
 *
 * @param splitBuffers the flat-table row, one SplittedBytes per column
 * @param measureIdx   index of the measure within cubeDesc.getMeasures()
 * @return the measure's input bytes, or null when the value is null
 */
private byte[] getValueBytes(SplittedBytes[] splitBuffers, int measureIdx) {
    MeasureDesc desc = cubeDesc.getMeasures().get(measureIdx);
    FunctionDesc func = desc.getFunction();
    ParameterDesc paramDesc = func.getParameter();
    // per-measure mapping to flat-table column positions; null = constant param
    int[] flatTableIdx = intermediateTableDesc.getMeasureColumnIndexes()[measureIdx];

    byte[] result = null;

    // constant
    if (flatTableIdx == null) {
        result = Bytes.toBytes(paramDesc.getValue());
    }
    // column values
    else {
        // for multiple columns, their values are joined
        for (int i = 0; i < flatTableIdx.length; i++) {
            SplittedBytes split = splitBuffers[flatTableIdx[i]];
            if (result == null) {
                // first column: copy exactly split.length bytes
                result = Arrays.copyOf(split.value, split.length);
            } else {
                // subsequent columns: grow the buffer and append
                byte[] newResult = new byte[result.length + split.length];
                System.arraycopy(result, 0, newResult, 0, result.length);
                System.arraycopy(split.value, 0, newResult, result.length, split.length);
                result = newResult;
            }
        }
    }

    if (func.isCount() || func.isHolisticCountDistinct()) {
        // note for holistic count distinct, this value will be ignored
        result = ONE;
    }

    if (isNull(result)) {
        result = null;
    }

    return result;
}
 
Example 30
Source Project: kylin-on-parquet-v2   Source File: OLAPAggregateRel.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Resolves the rewrite column for a cube-served aggregation by asking the
 * first table scan for a column named after the function's rewrite field.
 *
 * @throws IllegalStateException if the function does not use a rewrite field
 */
TblColRef buildRewriteColumn(FunctionDesc aggFunc) {
    if (!aggFunc.needRewriteField()) {
        throw new IllegalStateException("buildRewriteColumn on a aggrFunc that does not need rewrite " + aggFunc);
    }
    String rewriteFieldName = aggFunc.getRewriteFieldName();
    return this.context.firstTableScan.makeRewriteColumn(rewriteFieldName);
}