org.apache.kylin.metadata.model.TblColRef Java Examples

The following examples show how to use org.apache.kylin.metadata.model.TblColRef. Each snippet is taken from an open source project; its source file and project are noted in the header above it.
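Before the examples, here is a minimal sketch (not taken from any single project above) of the common ways a TblColRef is obtained in the snippets that follow: from a ColumnDesc via getRef(), by name lookup on a DataModelDesc, or as a synthetic inner column. The model and table instances are assumed to be already loaded, and the column name is only illustrative.

import org.apache.kylin.metadata.model.ColumnDesc;
import org.apache.kylin.metadata.model.DataModelDesc;
import org.apache.kylin.metadata.model.TableDesc;
import org.apache.kylin.metadata.model.TblColRef;

public class TblColRefSketch {
    static void obtainColRefs(DataModelDesc model, TableDesc table) {
        // 1) From table metadata: every ColumnDesc exposes its TblColRef.
        ColumnDesc firstColumn = table.getColumns()[0];
        TblColRef byDesc = firstColumn.getRef();

        // 2) By qualified name lookup on the data model (as in the AggregationGroup examples below).
        TblColRef byName = model.findColumn("STREAMING_V2_TABLE.SITE");

        // 3) A synthetic "inner" column, as used in the TupleFilter test below.
        TblColRef inner = TblColRef.newInnerColumn("test1", TblColRef.InnerDataTypeEnum.LITERAL);

        System.out.println(byDesc.getIdentity() + " / " + byName.getName() + " / " + inner.getName());
    }
}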
Example #1
Source File: OLAPJoinRel.java    From kylin-on-parquet-v2 with Apache License 2.0
protected JoinDesc buildJoin(RexCall condition) {
    Multimap<TblColRef, TblColRef> joinColumns = HashMultimap.create();
    translateJoinColumn(condition, joinColumns);

    List<String> pks = new ArrayList<String>();
    List<TblColRef> pkCols = new ArrayList<TblColRef>();
    List<String> fks = new ArrayList<String>();
    List<TblColRef> fkCols = new ArrayList<TblColRef>();
    for (Map.Entry<TblColRef, TblColRef> columnPair : joinColumns.entries()) {
        TblColRef fromCol = columnPair.getKey();
        TblColRef toCol = columnPair.getValue();
        fks.add(fromCol.getName());
        fkCols.add(fromCol);
        pks.add(toCol.getName());
        pkCols.add(toCol);
    }

    JoinDesc join = new JoinDesc();
    join.setForeignKey(fks.toArray(COLUMN_ARRAY_MARKER));
    join.setForeignKeyColumns(fkCols.toArray(new TblColRef[fkCols.size()]));
    join.setPrimaryKey(pks.toArray(COLUMN_ARRAY_MARKER));
    join.setPrimaryKeyColumns(pkCols.toArray(new TblColRef[pkCols.size()]));
    join.sortByFK();
    return join;
}
 
Example #2
Source File: CubeManager.java    From Kylin with Apache License 2.0
/**
 * Returns null if there is no dictionary for the given column.
 */
public Dictionary<?> getDictionary(CubeSegment cubeSeg, TblColRef col) {
    DictionaryInfo info = null;
    try {
        DictionaryManager dictMgr = getDictionaryManager();
        // logger.info("Using metadata url " + metadataUrl +
        // " for DictionaryManager");
        String dictResPath = cubeSeg.getDictResPath(col);
        if (dictResPath == null)
            return null;

        info = dictMgr.getDictionaryInfo(dictResPath);
        if (info == null)
            throw new IllegalStateException("No dictionary found by " + dictResPath + ", invalid cube state; cube segment" + cubeSeg + ", col " + col);
    } catch (IOException e) {
        throw new IllegalStateException("Failed to get dictionary for cube segment" + cubeSeg + ", col" + col, e);
    }

    return info.getDictionaryObject();
}
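The method above returns null when the segment has no dictionary for the column, so callers need a guard. Below is a hedged sketch of such a caller, assuming the newer Kylin package layout; the class and method names here are illustrative only.

import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.Dictionary;
import org.apache.kylin.cube.CubeManager;
import org.apache.kylin.cube.CubeSegment;
import org.apache.kylin.metadata.model.TblColRef;

public class DictionaryLookupSketch {
    static String decode(CubeSegment cubeSeg, TblColRef col, int id) {
        KylinConfig config = KylinConfig.getInstanceFromEnv();
        Dictionary<?> dict = CubeManager.getInstance(config).getDictionary(cubeSeg, col);
        if (dict == null) {
            // No dictionary was built for this column in this segment; fall back to the raw value path.
            return null;
        }
        // Decode an encoded id back to its string value, as the RowKeyColumnIO example below does.
        return String.valueOf(dict.getValueFromId(id));
    }
}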
 
Example #3
Source File: GTCubeStorageQueryBase.java    From kylin-on-parquet-v2 with Apache License 2.0
@SuppressWarnings("unchecked")
protected TupleFilter translateDerived(TupleFilter filter, Set<TblColRef> collector) {
    if (filter == null)
        return filter;

    if (filter instanceof CompareTupleFilter) {
        return translateDerivedInCompare((CompareTupleFilter) filter, collector);
    }

    List<TupleFilter> children = (List<TupleFilter>) filter.getChildren();
    List<TupleFilter> newChildren = Lists.newArrayListWithCapacity(children.size());
    boolean modified = false;
    for (TupleFilter child : children) {
        TupleFilter translated = translateDerived(child, collector);
        newChildren.add(translated);
        if (child != translated)
            modified = true;
    }
    if (modified) {
        filter = replaceChildren(filter, newChildren);
    }
    return filter;
}
 
Example #4
Source File: TableSchemaUpdateChecker.java    From kylin-on-parquet-v2 with Apache License 2.0
/**
 * Check whether all columns used in `cube` have a compatible schema in the current Hive schema denoted by `newTable`.
 * @param cube cube to check, must use `origTable` in its model
 * @param origTable Kylin's existing metadata for the table
 * @param newTable current Hive schema of the table
 * @return columns in origTable that are missing from or incompatible with newTable
 */
private List<String> checkAllColumnsInCube(CubeInstance cube, TableDesc origTable, TableDesc newTable) {
    Set<ColumnDesc> usedColumns = Sets.newHashSet();
    for (TblColRef col : cube.getAllColumns()) {
        usedColumns.add(col.getColumnDesc());
    }

    List<String> violateColumns = Lists.newArrayList();
    for (ColumnDesc column : origTable.getColumns()) {
        if (!column.isComputedColumn() && usedColumns.contains(column)) {
            ColumnDesc newCol = newTable.findColumnByName(column.getName());
            if (newCol == null || !isColumnCompatible(column, newCol)) {
                violateColumns.add(column.getName());
            }
        }
    }
    return violateColumns;
}
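Below is a hedged sketch of how the returned violation list might be consumed; the reporting helper is purely illustrative and not part of the project's actual checker.

import java.util.List;
import org.apache.kylin.cube.CubeInstance;

public class SchemaCheckReportSketch {
    // Illustrative only: turn the violation list into a human-readable message.
    static String describeViolations(CubeInstance cube, List<String> violateColumns) {
        if (violateColumns.isEmpty()) {
            return null; // the new Hive schema is compatible with this cube
        }
        return "Column(s) " + String.join(", ", violateColumns) + " used by cube " + cube.getName()
                + " are missing from or incompatible with the new table schema";
    }
}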
 
Example #5
Source File: BaseCuboidMapperBase.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
protected void doSetup(Context context) throws IOException {
    super.bindCurrentConfiguration(context.getConfiguration());
    cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    segmentID = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_ID);
    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinPropsAndMetadata();
    cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    cubeDesc = cube.getDescriptor();
    cubeSegment = cube.getSegmentById(segmentID);
    CubeJoinedFlatTableEnrich intermediateTableDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);

    Map<TblColRef, Dictionary<String>> dictionaryMap = DictionaryGetterUtil.getDictionaryMap(cubeSegment,
            context.getInputSplit(), context.getConfiguration());

    baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, cubeDesc, cubeSegment, intermediateTableDesc,
            dictionaryMap);
}
 
Example #6
Source File: OLAPSortRel.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public void implementOLAP(OLAPImplementor implementor) {
    implementor.fixSharedOlapTableScan(this);
    implementor.visitChild(getInput(), this);

    this.context = implementor.getContext();
    this.columnRowType = buildColumnRowType();

    for (RelFieldCollation fieldCollation : this.collation.getFieldCollations()) {
        int index = fieldCollation.getFieldIndex();
        SQLDigest.OrderEnum order = getOrderEnum(fieldCollation.getDirection());
        OLAPRel olapChild = (OLAPRel) this.getInput();
        TblColRef orderCol = olapChild.getColumnRowType().getAllColumns().get(index);
        this.context.addSort(orderCol, order);
        this.context.storageContext.markSort();

    }
}
 
Example #7
Source File: ColumnarMemoryStorePersister.java    From kylin with Apache License 2.0
private Map<TblColRef, Dictionary<String>> buildAndPersistDictionaries(FragmentMetaInfo fragmentMetaInfo,
        List<List<Object>> allColumnarValues, CountingOutputStream fragmentOut) throws IOException {
    Map<TblColRef, Dictionary<String>> dictMaps = Maps.newHashMap();
    List<DimDictionaryMetaInfo> dimDictionaryMetaInfos = Lists.newArrayList();
    for (int i = 0; i < dimensions.length; i++) {
        TblColRef dimension = dimensions[i];
        List<Object> dimValueList = allColumnarValues.get(i);
        Dictionary<String> dict;
        DimDictionaryMetaInfo dimDictionaryMetaInfo = new DimDictionaryMetaInfo();
        if (dimensionsUseDictEncoding.contains(dimension)) {
            dict = buildDictionary(dimension, dimValueList);
            dictMaps.put(dimension, dict);

            dimDictionaryMetaInfo.setDimName(dimension.getName());
            dimDictionaryMetaInfo.setDictType(dict.getClass().getName());
            dimDictionaryMetaInfo.setStartOffset((int) fragmentOut.getCount());

            DictionarySerializer.serialize(dict, fragmentOut);
            dimDictionaryMetaInfo.setDictLength((int) fragmentOut.getCount()
                    - dimDictionaryMetaInfo.getStartOffset());
            dimDictionaryMetaInfos.add(dimDictionaryMetaInfo);
        }
    }
    fragmentMetaInfo.setDimDictionaryMetaInfos(dimDictionaryMetaInfos);
    return dictMaps;
}
 
Example #8
Source File: OLAPAggregateRel.java    From Kylin with Apache License 2.0
private ColumnRowType buildColumnRowType() {
    buildGroups();
    buildAggregations();

    ColumnRowType inputColumnRowType = ((OLAPRel) getChild()).getColumnRowType();
    List<TblColRef> columns = new ArrayList<TblColRef>(this.rowType.getFieldCount());
    columns.addAll(this.groups);

    for (int i = 0; i < this.aggregations.size(); i++) {
        FunctionDesc aggFunc = this.aggregations.get(i);
        TblColRef aggCol = null;
        if (aggFunc.needRewrite()) {
            aggCol = buildRewriteColumn(aggFunc);
        } else {
            AggregateCall aggCall = this.rewriteAggCalls.get(i);
            if (!aggCall.getArgList().isEmpty()) {
                int index = aggCall.getArgList().get(0);
                aggCol = inputColumnRowType.getColumnByIndex(index);
            }
        }
        columns.add(aggCol);
    }
    return new ColumnRowType(columns);
}
 
Example #9
Source File: CubeDesc.java    From kylin-on-parquet-v2 with Apache License 2.0
public List<TblColRef> getAllGlobalDictColumns() {
    List<TblColRef> globalDictCols = new ArrayList<TblColRef>();
    List<DictionaryDesc> dictionaryDescList = getDictionaries();

    if (dictionaryDescList == null) {
        return globalDictCols;
    }

    for (DictionaryDesc dictionaryDesc : dictionaryDescList) {
        String cls = dictionaryDesc.getBuilderClass();
        if (GlobalDictionaryBuilder.class.getName().equals(cls)
                || SegmentAppendTrieDictBuilder.class.getName().equals(cls))
            globalDictCols.add(dictionaryDesc.getColumnRef());
    }
    return globalDictCols;
}
 
Example #10
Source File: MergeDictionaryMapper.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
protected void doSetup(Context context) throws IOException, InterruptedException {
    super.doSetup(context);

    final SerializableConfiguration sConf = new SerializableConfiguration(context.getConfiguration());
    final String metaUrl = context.getConfiguration().get(BatchConstants.ARG_META_URL);
    final String cubeName = context.getConfiguration().get(BatchConstants.ARG_CUBE_NAME);
    final String segmentIds = context.getConfiguration().get(MergeDictionaryJob.OPTION_MERGE_SEGMENT_IDS.getOpt());

    final KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(sConf, metaUrl);
    final CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
    final CubeDesc cubeDesc = CubeDescManager.getInstance(kylinConfig).getCubeDesc(cubeInstance.getDescName());

    mergingSegments = getMergingSegments(cubeInstance, StringUtil.splitByComma(segmentIds));
    tblColRefs = cubeDesc.getAllColumnsNeedDictionaryBuilt().toArray(new TblColRef[0]);
    dictMgr = DictionaryManager.getInstance(kylinConfig);
}
 
Example #11
Source File: DictionaryEnumerator.java    From kylin with Apache License 2.0
private static boolean ifColumnHaveDictionary(TblColRef col, IRealization realization, boolean enableCheck) {
    if (realization instanceof CubeInstance) {
        final CubeInstance cube = (CubeInstance) realization;
        boolean ifEnabled = !enableCheck || cube.getConfig().isDictionaryEnumeratorEnabled();
        return ifEnabled && cube.getDescriptor().getAllDimsHaveDictionary().contains(col);
    } else if (realization instanceof HybridInstance) {
        final HybridInstance hybridInstance = (HybridInstance) realization;
        for (IRealization entry : hybridInstance.getRealizations()) {
            if (!ifColumnHaveDictionary(col, entry, enableCheck)) {
                return false;
            }
        }
        return true;
    }
    return false;
}
 
Example #12
Source File: StreamingSegmentManagerTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testOneValueAggregation() {
    genEvents(80000);
    StreamingCubeDataSearcher searcher = streamingSegmentManager.getSearcher();
    String startTimeStr = "2018-07-30 20:00:00";
    long startTime = DateFormat.stringToMillis(startTimeStr);
    String endTimeStr = "2018-07-30 20:04:00";
    long endTime = DateFormat.stringToMillis(endTimeStr);
    CompareTupleFilter filter1 = testHelper.buildCompareFilter("STREAMING_V2_TABLE.MINUTE_START",
        FilterOperatorEnum.GTE, startTimeStr);
    CompareTupleFilter filter2 = testHelper.buildCompareFilter("STREAMING_V2_TABLE.MINUTE_START",
        FilterOperatorEnum.LT, endTimeStr);
    TupleFilter filter = testHelper.buildAndFilter(filter1, filter2);
    Set<FunctionDesc> metrics = Sets.newHashSet(testHelper.simulateCountMetric());
    Set<TblColRef> dimensions = testHelper.simulateDimensions("STREAMING_V2_TABLE.MINUTE_START");
    Set<TblColRef> groups = Sets.newHashSet();
    StreamingSearchContext searchRequest = new StreamingSearchContext(cubeDesc, dimensions, groups, metrics, filter,
        null);
    IStreamingSearchResult segmentResults1 = searcher.doSearch(searchRequest, -1, true);
    for (Record record : segmentResults1) {
        long minStart = Long.valueOf(record.getDimensions()[0]);
        assertTrue(startTime <= minStart && minStart < endTime);
        System.out.println(record);
    }
}
 
Example #13
Source File: CubeDesc.java    From Kylin with Apache License 2.0
public Map<Array<TblColRef>, List<DeriveInfo>> getHostToDerivedInfo(List<TblColRef> rowCols, Collection<TblColRef> wantedCols) {
    Map<Array<TblColRef>, List<DeriveInfo>> result = new HashMap<Array<TblColRef>, List<DeriveInfo>>();
    for (Entry<Array<TblColRef>, List<DeriveInfo>> entry : hostToDerivedMap.entrySet()) {
        Array<TblColRef> hostCols = entry.getKey();
        boolean hostOnRow = rowCols.containsAll(Arrays.asList(hostCols.data));
        if (!hostOnRow)
            continue;

        List<DeriveInfo> wantedInfo = new ArrayList<DeriveInfo>();
        for (DeriveInfo info : entry.getValue()) {
            // has any wanted columns?
            if (wantedCols == null || Collections.disjoint(wantedCols, Arrays.asList(info.columns)) == false)
                wantedInfo.add(info);
        }

        if (wantedInfo.size() > 0)
            result.put(hostCols, wantedInfo);
    }
    return result;
}
 
Example #14
Source File: RowKeyColumnIO.java    From Kylin with Apache License 2.0
public String readColumnString(TblColRef col, byte[] bytes, int bytesLen) {
    Dictionary<String> dict = getDictionary(col);
    if (dict == null) {
        bytes = Bytes.head(bytes, bytesLen);
        if (isNull(bytes)) {
            return null;
        }
        bytes = removeFixLenPad(bytes, 0);
        return Bytes.toString(bytes);
    } else {
        int id = BytesUtil.readUnsigned(bytes, 0, bytesLen);
        try {
            String value = dict.getValueFromId(id);
            return value;
        } catch (IllegalArgumentException e) {
            logger.error("Can't get dictionary value for column " + col.getName() + " (id = " + id + ")");
            return "";
        }
    }
}
 
Example #15
Source File: TupleFilterTest.java    From kylin with Apache License 2.0
@Test
public void testMustTrueTupleFilter() {
    TupleFilter andFilter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
    TupleFilter andFilter2  = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.AND);
    TupleFilter orFilter = new LogicalTupleFilter(TupleFilter.FilterOperatorEnum.OR);
    andFilter.addChild(andFilter2);
    andFilter.addChild(orFilter);

    Set<CompareTupleFilter> trueTupleFilters = andFilter.findMustTrueCompareFilters();
    Assert.assertTrue(trueTupleFilters.isEmpty());

    TupleFilter compFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
    compFilter.addChild(new ColumnTupleFilter(TblColRef.newInnerColumn("test1", TblColRef.InnerDataTypeEnum.LITERAL)));
    TupleFilter compFilter2 = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.GT);
    compFilter2.addChild(new ColumnTupleFilter(TblColRef.newInnerColumn("test2", TblColRef.InnerDataTypeEnum.LITERAL)));
    andFilter2.addChild(compFilter);
    orFilter.addChild(compFilter2);
    Assert.assertEquals(Sets.newHashSet(compFilter), andFilter.findMustTrueCompareFilters());
    
    Assert.assertEquals(Sets.newHashSet(compFilter2), compFilter2.findMustTrueCompareFilters());
}
 
Example #16
Source File: SparkFactDistinct.java    From kylin-on-parquet-v2 with Apache License 2.0
private void outputDict(TblColRef col, Dictionary<String> dict,
        List<Tuple2<String, Tuple3<Writable, Writable, String>>> result)
        throws IOException {
    // output written to baseDir/colName/colName.rldict-r-00000 (etc)
    String dictFileName = col.getIdentity() + "/" + col.getName() + DICT_FILE_POSTFIX;

    try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
            DataOutputStream outputStream = new DataOutputStream(baos)) {
        outputStream.writeUTF(dict.getClass().getName());
        dict.write(outputStream);

        result.add(new Tuple2<String, Tuple3<Writable, Writable, String>>(BatchConstants.CFG_OUTPUT_DICT,
                new Tuple3<Writable, Writable, String>(NullWritable.get(),
                        new ArrayPrimitiveWritable(baos.toByteArray()), dictFileName)));
    }
}
 
Example #17
Source File: RawMeasureType.java    From kylin with Apache License 2.0
public CapabilityResult.CapabilityInfluence influenceCapabilityCheck(Collection<TblColRef> unmatchedDimensions, Collection<FunctionDesc> unmatchedAggregations, SQLDigest digest, final MeasureDesc measureDesc) {
    //is raw query
    if (!digest.isRawQuery)
        return null;

    TblColRef rawColumn = getRawColumn(measureDesc.getFunction());
    if (!digest.allColumns.isEmpty() && !digest.allColumns.contains(rawColumn)) {
        return null;
    }

    unmatchedAggregations.remove(measureDesc.getFunction());

    //contain one raw measure : cost * 0.9
    return new CapabilityResult.CapabilityInfluence() {
        @Override
        public double suggestCostMultiplier() {
            return 0.9;
        }

        @Override
        public MeasureDesc getInvolvedMeasure() {
            return measureDesc;
        }
    };
}
 
Example #18
Source File: ColumnarSegmentStoreTest.java    From kylin-on-parquet-v2 with Apache License 2.0
private void scanStore() throws IOException {
    Set<TblColRef> dimensions = testHelper.simulateDimensions(new String[] { "STREAMING_V2_TABLE.SITE" });
    Set<TblColRef> groups = testHelper.simulateDimensions(new String[] { "STREAMING_V2_TABLE.SITE" });
    Set<FunctionDesc> metrics = testHelper.simulateMetrics();
    StreamingSearchContext searchRequest = new StreamingSearchContext(cubeDesc, dimensions, groups, metrics,
            null, null);
    ResultCollector resultCollector = new SingleThreadResultCollector();
    segmentStore.search(searchRequest, resultCollector);
    int count = 0;
    for (Record record : resultCollector) {
        count++;
    }
    resultCollector.close();
}
 
Example #19
Source File: StreamingCubeRule.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public void validate(CubeDesc cube, ValidateContext context) {
    DataModelDesc model = cube.getModel();
    
    if (model.getRootFactTable().getTableDesc().getSourceType() != ISourceAware.ID_STREAMING
            && !model.getRootFactTable().getTableDesc().isStreamingTable()) {
        return;
    }

    if (model.getPartitionDesc() == null || model.getPartitionDesc().getPartitionDateColumn() == null) {
        context.addResult(ResultLevel.ERROR, "Must define a partition column.");
        return;
    }

    final TblColRef partitionCol = model.getPartitionDesc().getPartitionDateColumnRef();
    boolean found = false;
    for (DimensionDesc dimensionDesc : cube.getDimensions()) {
        for (TblColRef dimCol : dimensionDesc.getColumnRefs()) {
            if (dimCol.equals(partitionCol)) {
                found = true;
                break;
            }
        }
    }

    if (found == false) {
        context.addResult(ResultLevel.ERROR, "Partition column '" + partitionCol + "' isn't in dimension list.");
        return;
    }

}
 
Example #20
Source File: FilterBaseTest.java    From Kylin with Apache License 2.0
protected CompareTupleFilter buildCompareCaseFilter(List<TblColRef> groups, String constValue) {
    CompareTupleFilter compareFilter = new CompareTupleFilter(FilterOperatorEnum.EQ);
    CaseTupleFilter caseFilter = buildCaseFilter(groups);
    compareFilter.addChild(caseFilter);
    ConstantTupleFilter constantFilter = new ConstantTupleFilter(constValue);
    compareFilter.addChild(constantFilter);
    return compareFilter;
}
 
Example #21
Source File: HBaseStorage.java    From kylin with Apache License 2.0
private static TblColRef getPartitionCol(IRealization realization) {
    String modelName = realization.getModel().getName();
    DataModelDesc dataModelDesc = DataModelManager.getInstance(KylinConfig.getInstanceFromEnv())
            .getDataModelDesc(modelName);
    PartitionDesc partitionDesc = dataModelDesc.getPartitionDesc();
    Preconditions.checkArgument(partitionDesc != null, "PartitionDesc for " + realization + " is null!");
    TblColRef partitionColRef = partitionDesc.getPartitionDateColumnRef();
    Preconditions.checkArgument(partitionColRef != null,
            "getPartitionDateColumnRef for " + realization + " is null");
    return partitionColRef;
}
 
Example #22
Source File: CubeDesc.java    From Kylin with Apache License 2.0
private void initMeasureColumns(Map<String, TableDesc> tables) {
    if (measures == null || measures.isEmpty()) {
        return;
    }

    TableDesc factTable = tables.get(getFactTable());
    for (MeasureDesc m : measures) {
        m.setName(m.getName().toUpperCase());

        if (m.getDependentMeasureRef() != null) {
            m.setDependentMeasureRef(m.getDependentMeasureRef().toUpperCase());
        }
        
        FunctionDesc f = m.getFunction();
        f.setExpression(f.getExpression().toUpperCase());
        f.setReturnDataType(DataType.getInstance(f.getReturnType()));

        ParameterDesc p = f.getParameter();
        p.normalizeColumnValue();

        if (p.isColumnType()) {
            ArrayList<TblColRef> colRefs = Lists.newArrayList();
            for (String cName : p.getValue().split("\\s*,\\s*")) {
                ColumnDesc sourceColumn = factTable.findColumnByName(cName);
                TblColRef colRef = new TblColRef(sourceColumn);
                colRefs.add(colRef);
                allColumns.add(colRef);
            }
            if (colRefs.isEmpty() == false)
                p.setColRefs(colRefs);
        }
        
        // verify holistic count distinct as a dependent measure
        if (m.isHolisticCountDistinct() && StringUtils.isBlank(m.getDependentMeasureRef())) {
            throw new IllegalStateException(m + " is a holistic count distinct but it has no DependentMeasureRef defined!");
        }
    }
}
 
Example #23
Source File: FragmentFileSearcher.java    From kylin with Apache License 2.0
public FragmentFilterConverter(FragmentMetaInfo fragmentMetaInfo, Set<TblColRef> unEvaluableColumnCollector,
                               Map<TblColRef, Integer> colMapping, ColumnarRecordCodec recordCodec) {
    this.unEvaluableColumnCollector = unEvaluableColumnCollector;
    this.recordCodec = recordCodec;
    this.colMapping = colMapping;
    if (fragmentMetaInfo.hasValidEventTimeRange()) {
        this.filterTimeRangeChecker = new CompareFilterTimeRangeChecker(fragmentMetaInfo.getMinEventTime(),
                fragmentMetaInfo.getMaxEventTime(), true);
    }
    buf = ByteBuffer.allocate(recordCodec.getMaxDimLength());
}
 
Example #24
Source File: FilterEvaluateTest.java    From kylin-on-parquet-v2 with Apache License 2.0
@Test
public void testEvaluate00() {
    List<TblColRef> groups = buildGroups();
    TupleFilter filter = buildEQCompareFilter(groups, 0);

    byte[] bytes = TupleFilterSerializer.serialize(filter, CS);
    TupleFilter newFilter = TupleFilterSerializer.deserialize(bytes, CS);

    int number = 10000;
    int[] matcheCounts = new int[] { 0, 0, 0 };
    Collection<Tuple> tuples = generateTuple(number, groups, matcheCounts);
    int match = evaluateTuples(tuples, newFilter);

    assertEquals(match, matcheCounts[0]);
}
 
Example #25
Source File: ExpressionColCollector.java    From kylin with Apache License 2.0
public static Set<TblColRef> collectColumns(TupleExpression tupleExpression) {
    Pair<Set<TblColRef>, Set<TblColRef>> pairRet = collectColumnsPair(tupleExpression);
    Set<TblColRef> ret = Sets.newHashSet();
    ret.addAll(pairRet.getFirst());
    ret.addAll(pairRet.getSecond());
    return ret;
}
 
Example #26
Source File: AggregationGroup.java    From kylin with Apache License 2.0
private void buildPartialCubeFullMask(RowKeyDesc rowKeyDesc) {
    Preconditions.checkState(this.includes != null);
    Preconditions.checkState(this.includes.length != 0);

    partialCubeFullMask = 0L;
    for (String dim : this.includes) {
        TblColRef hColumn = cubeDesc.getModel().findColumn(dim);
        Integer index = rowKeyDesc.getColumnBitIndex(hColumn);
        long bit = 1L << index;
        partialCubeFullMask |= bit;
    }
}
 
Example #27
Source File: IIJoinedFlatTableDesc.java    From Kylin with Apache License 2.0
private void parseIIDesc() {
    this.tableName = "kylin_intermediate_ii_" + iiDesc.getName();

    int columnIndex = 0;
    for (TblColRef col : iiDesc.listAllColumns()) {
        columnList.add(new IntermediateColumnDesc(String.valueOf(columnIndex), col));
        columnIndex++;
    }
}
 
Example #28
Source File: FilterSerializeTest.java    From kylin with Apache License 2.0
@Test
public void testSerialize05() {
    ColumnDesc column = new ColumnDesc();
    TblColRef colRef = column.getRef();
    List<TblColRef> groups = new ArrayList<TblColRef>();
    groups.add(colRef);

    assertFilterSerDe(buildEQCompareFilter(groups, 0));
}
 
Example #29
Source File: EndpointTupleIterator.java    From Kylin with Apache License 2.0
private String findName(TblColRef column, Map<TblColRef, String> aliasMap) {
    String name = null;
    if (aliasMap != null) {
        name = aliasMap.get(column);
    }
    if (name == null) {
        name = column.getName();
    }
    return name;

}
 
Example #30
Source File: AggregationGroup.java    From kylin with Apache License 2.0
private void buildMandatoryColumnMask(RowKeyDesc rowKeyDesc) {
    mandatoryColumnMask = 0L;

    String[] mandatory_dims = this.selectRule.mandatoryDims;
    if (mandatory_dims == null || mandatory_dims.length == 0) {
        return;
    }

    for (String dim : mandatory_dims) {
        TblColRef hColumn = cubeDesc.getModel().findColumn(dim);
        Integer index = rowKeyDesc.getColumnBitIndex(hColumn);
        mandatoryColumnMask |= (1L << index);
    }
}