Java Code Examples for org.apache.kylin.common.KylinConfig#SetAndUnsetThreadLocalConfig

The following examples show how to use org.apache.kylin.common.KylinConfig#SetAndUnsetThreadLocalConfig. They are taken from the kylin and kylin-on-parquet-v2 projects; the source file and license are noted above each example.
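
All of the examples share one pattern: KylinConfig.setAndUnsetThreadLocalConfig(config) installs the given KylinConfig as the current thread's configuration and returns a handle that, being AutoCloseable, clears the thread-local config again when the try-with-resources block exits. Below is a minimal sketch of that pattern; the field config and the method around it are illustrative, not taken from any of the projects. The Spark and Flink functions that follow wrap their metadata access this way because they run on executor threads, where the job's config (typically loaded from HDFS) is not otherwise installed.

public void doWork() {
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        // While the block is open, Kylin code that resolves its configuration
        // thread-locally (e.g. via KylinConfig.getInstanceFromEnv()) sees config.
        CubeManager cubeManager = CubeManager.getInstance(config);
    }
    // After close(), the thread-local config has been removed again.
}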
Example 1
Source File: FlinkCubingByLayer.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
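    // Install kConfig as the thread-local config so that Kylin code inside the
    // block that resolves its config from the environment sees the job's config.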
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        CubeDesc cubeDesc = cubeInstance.getDescriptor();
        CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
        CubeJoinedFlatTableEnrich interDesc = new CubeJoinedFlatTableEnrich(
                EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
        long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
        Cuboid baseCuboid = Cuboid.findForMandatory(cubeDesc, baseCuboidId);
        baseCuboidBuilder = new BaseCuboidBuilder(kConfig, cubeDesc, cubeSegment, interDesc,
                AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid),
                MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
    }
}
 
Example 2
Source File: SparkBuildDictionary.java    From kylin-on-parquet-v2 with Apache License 2.0
private void init() {
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        cubeSegment = CubeManager.getInstance(config).getCube(cubeName).getSegmentById(segmentId);
    }
    initialized = true;
}
 
Example 3
Source File: SparkCubingByLayer.java    From kylin with Apache License 2.0
public void init() {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        cubeDesc = cubeInstance.getDescriptor();
        aggregators = new MeasureAggregators(cubeDesc.getMeasures());
        measureNum = cubeDesc.getMeasures().size();
    }
}
 
Example 4
Source File: FlinkCubingByLayer.java    From kylin with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        this.cubeSegment = cubeInstance.getSegmentById(segmentId);
        this.cubeDesc = cubeInstance.getDescriptor();
        this.ndCuboidBuilder = new NDCuboidBuilder(cubeSegment, new RowKeyEncoderProvider(cubeSegment));
        this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    }
}
 
Example 5
Source File: SparkFactDistinct.java    From kylin with Apache License 2.0
private void init() {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        reducerMapping = new FactDistinctColumnsReducerMapping(cubeInstance);
        initialized = true;
    }
}
 
Example 6
Source File: SparkBuildDictionary.java    From kylin-on-parquet-v2 with Apache License 2.0
private void init() {
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        cubeSegment = CubeManager.getInstance(config).getCube(cubeName).getSegmentById(segmentId);
        dictManager = DictionaryManager.getInstance(config);
    }
    initialized = true;
}
 
Example 7
Source File: SparkMergingDictionary.java    From kylin-on-parquet-v2 with Apache License 2.0
private void init() {
    kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kylinConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kylinConfig).getCube(cubeName);
        dictMgr = DictionaryManager.getInstance(kylinConfig);
        mergingSegments = getMergingSegments(cubeInstance, segmentIds);
    }
}
 
Example 8
Source File: SparkBuildDictionary.java    From kylin with Apache License 2.0
private void init() {
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        cubeManager = CubeManager.getInstance(config);
        cubeSegment = cubeManager.getCube(cubeName).getSegmentById(segmentId);
    }
    initialized = true;
}
 
Example 9
Source File: FlinkFactDistinctColumns.java    From kylin with Apache License 2.0
private void init() {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        reducerMapping = new FactDistinctColumnsReducerMapping(cubeInstance);
        initialized = true;
    }
}
 
Example 10
Source File: SparkBuildDictionary.java    From kylin with Apache License 2.0
private void init() {
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        cubeSegment = CubeManager.getInstance(config).getCube(cubeName).getSegmentById(segmentId);
        dictManager = DictionaryManager.getInstance(config);
    }
    initialized = true;
}
 
Example 11
Source File: SparkCubingByLayer.java    From kylin-on-parquet-v2 with Apache License 2.0
public void init() {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        this.cubeSegment = cubeInstance.getSegmentById(segmentId);
        this.cubeDesc = cubeInstance.getDescriptor();
        this.ndCuboidBuilder = new NDCuboidBuilder(cubeSegment, new RowKeyEncoderProvider(cubeSegment));
        this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    }
}
 
Example 12
Source File: SparkBuildDictionary.java    From kylin with Apache License 2.0
@Override
public Tuple2<String, String> call(Tuple2<String, Iterable<TableRef>> snapShot) throws Exception {
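    // One-time lazy initialization of per-executor state; the double-checked
    // lock keeps init() from running more than once.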
    if (!initialized) {
        synchronized (SparkBuildDictionary.class) {
            if (!initialized) {
                init();
            }
        }
    }
    String tableIdentity = snapShot._1();
    String snapshotPath = "";
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        logger.info("Building snapshot of {}", tableIdentity);

        if (cubeSegment.getModel().isLookupTable(tableIdentity) && !cubeSegment.getCubeDesc().isExtSnapshotTable(tableIdentity)) {
            try {
                snapshotPath = buildSnapshotTable(config, cubeSegment, tableIdentity, jobId);
            } catch (IOException e) {
                logger.error("Error while build snapshot table " + tableIdentity + ", " + e.getMessage());
                return new Tuple2<>(tableIdentity, snapshotPath);
            }
        }
    }

    return new Tuple2<>(tableIdentity, snapshotPath);
}
 
Example 13
Source File: SparkUHCDictionary.java    From kylin with Apache License 2.0
@Override
public Tuple2<Integer, List<String>> call(String sequenceFilePath) throws Exception {
    Path path = new Path(sequenceFilePath);
    logger.info("Column absolute path is " + path.toString());
    if (!HadoopUtil.getFileSystem(path).exists(path)) {
        return new Tuple2<>(-1, null);
    }

    String columnName = path.getParent().getName();
    int index = -1;
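    // Locate this file's column among the configured UHC columns by matching
    // the parent directory name against each column's identity.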
    for (int i = 0; i < uhcColumns.size(); i++) {
        if (uhcColumns.get(i).getIdentity().equalsIgnoreCase(columnName)) {
            index = i;
            break;
        }
    }

    if (index == -1) {
        return new Tuple2<>(-1, null);
    }

    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        List<String> values = Lists.newArrayList();
        values.addAll(HadoopUtil.readDistinctColumnValues(sequenceFilePath));

        logger.info("UHC column " + columnName + " contains distinct values " + values);

        return new Tuple2<>(index, values);
    }
}
 
Example 14
Source File: FlinkCubingByLayer.java    From kylin with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        cubeDesc = cubeInstance.getDescriptor();
        aggregators = new MeasureAggregators(cubeDesc.getMeasures());
        measureNum = cubeDesc.getMeasures().size();
    }
}
 
Example 15
Source File: FlinkCubingByLayer.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        this.cubeSegment = cubeInstance.getSegmentById(segmentId);
        this.cubeDesc = cubeInstance.getDescriptor();
        this.ndCuboidBuilder = new NDCuboidBuilder(cubeSegment, new RowKeyEncoderProvider(cubeSegment));
        this.rowKeySplitter = new RowKeySplitter(cubeSegment);
    }
}
 
Example 16
Source File: SparkBuildDictionary.java    From kylin with Apache License 2.0
@Override
public Tuple2<String, Tuple3<String, Integer, Integer>> call(TblColRef tblColRef) throws Exception {
    if (!initialized) {
        synchronized (SparkBuildDictionary.class) {
            if (!initialized) {
                init();
            }
        }
    }

    logger.info("Building dictionary for column {}", tblColRef);
    IReadableTable inpTable = getDistinctValuesFor(tblColRef);
    Dictionary<String> preBuiltDict;
    DictionaryInfo dictInfo;
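    // Save a pre-built dictionary if one exists, otherwise build it from the
    // distinct values, with config installed as the thread-local KylinConfig.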
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(config)) {
        preBuiltDict = getDictionary(tblColRef);

        if (preBuiltDict != null) {
            logger.info("Dict for '{}' has already been built, save it", tblColRef.getName());
            dictInfo = dictManager.saveDictionary(tblColRef, inpTable, preBuiltDict);
        } else {
            logger.info("Dict for '{}' not pre-built, build it from {}", tblColRef.getName(), inpTable);
            String builderClass = cubeSegment.getCubeDesc().getDictionaryBuilderClass(tblColRef);
            dictInfo = dictManager.buildDictionary(tblColRef, inpTable, builderClass);
            preBuiltDict = dictInfo.getDictionaryObject();
        }
    }

    return new Tuple2<>(tblColRef.getIdentity(),
            new Tuple3<>(dictInfo.getResourcePath(), preBuiltDict.getSize(), preBuiltDict.getSizeOfId()));
}
 
Example 17
Source File: FlinkCubingByLayer.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(kConfig)) {
        CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
        cubeDesc = cubeInstance.getDescriptor();
        aggregators = new MeasureAggregators(cubeDesc.getMeasures());
        measureNum = cubeDesc.getMeasures().size();
    }
}
 
Example 18
Source File: FactDistinctColumnsBase.java    From kylin with Apache License 2.0
public void setupMap() {
    outputKey = new Text();
    outputValue = new Text();
    emptyText = new Text();
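    // Everything below reads cube metadata and statistics settings, so it runs
    // with envConfig installed as the thread-local KylinConfig.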
    try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
            .setAndUnsetThreadLocalConfig(envConfig)) {
        cube = CubeManager.getInstance(envConfig).getCube(cubeName);
        cubeSeg = cube.getSegmentById(segmentId);
        cubeDesc = cube.getDescriptor();
        baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
        reducerMapping = new FactDistinctColumnsReducerMapping(cube);
        allCols = reducerMapping.getAllDimDictCols();

        intermediateTableDesc = new CubeJoinedFlatTableEnrich(EngineFactory.getJoinedFlatTableDesc(cubeSeg), cubeDesc);
        columnIndex = new int[allCols.size()];
        for (int i = 0; i < allCols.size(); i++) {
            TblColRef colRef = allCols.get(i);
            int columnIndexOnFlatTbl = intermediateTableDesc.getColumnIndex(colRef);
            columnIndex[i] = columnIndexOnFlatTbl;
        }

        tmpbuf = ByteBuffer.allocate(4096);
        nRowKey = cubeDesc.getRowkey().getRowKeyColumns().length;

        Set<Long> cuboidIdSet = Sets.newHashSet(cubeSeg.getCuboidScheduler().getAllCuboidIds());
        if (StatisticsDecisionUtil.isAbleToOptimizeCubingPlan(cubeSeg)) {
            // For the cube planner, row count stats should be calculated for every prebuilt cuboid.
            // If the precondition for triggering cube planner phase one is satisfied, row count stats for the mandatory cuboids are needed as well.
            cuboidIdSet.addAll(cubeSeg.getCubeDesc().getMandatoryCuboids());
        }
        cuboidIds = cuboidIdSet.toArray(new Long[cuboidIdSet.size()]);
        allCuboidsBitSet = CuboidUtil.getCuboidBitSet(cuboidIds, nRowKey);

        allCuboidsHLL = new HLLCounter[cuboidIds.length];
        for (int i = 0; i < cuboidIds.length; i++) {
            allCuboidsHLL[i] = new HLLCounter(cubeDesc.getConfig().getCubeStatsHLLPrecision(), RegisterType.DENSE);
        }

        // for KYLIN-2518 backward compatibility
        boolean isUsePutRowKeyToHllNewAlgorithm;
        if (KylinVersion.isBefore200(cubeDesc.getVersion())) {
            isUsePutRowKeyToHllNewAlgorithm = false;
            logger.info("Found KylinVersion : {}. Use old algorithm for cuboid sampling.", cubeDesc.getVersion());
        } else {
            isUsePutRowKeyToHllNewAlgorithm = true;
            logger.info(
                    "Found KylinVersion : {}. Use new algorithm for cuboid sampling. About the details of the new algorithm, please refer to KYLIN-2518",
                    cubeDesc.getVersion());
        }

        int calculatorNum = getStatsThreadNum(cuboidIds.length);
        cuboidStatCalculators = new CuboidStatCalculator[calculatorNum];
        int splitSize = cuboidIds.length / calculatorNum;
        if (splitSize <= 0) {
            splitSize = 1;
        }
        for (int i = 0; i < calculatorNum; i++) {
            HLLCounter[] cuboidsHLLSplit;
            Integer[][] cuboidsBitSetSplit;
            Long[] cuboidIdSplit;
            int start = i * splitSize;
            if (start >= cuboidIds.length) {
                break;
            }
            int end = (i + 1) * splitSize;
            if (i == calculatorNum - 1) { // last split
                end = cuboidIds.length;
            }

            cuboidsHLLSplit = Arrays.copyOfRange(allCuboidsHLL, start, end);
            cuboidsBitSetSplit = Arrays.copyOfRange(allCuboidsBitSet, start, end);
            cuboidIdSplit = Arrays.copyOfRange(cuboidIds, start, end);
            CuboidStatCalculator calculator = new CuboidStatCalculator(i,
                    intermediateTableDesc.getRowKeyColumnIndexes(), cuboidIdSplit, cuboidsBitSetSplit,
                    isUsePutRowKeyToHllNewAlgorithm, cuboidsHLLSplit);
            cuboidStatCalculators[i] = calculator;
            calculator.start();
        }

        // setup dict col deduper
        dictColDeduper = new DictColDeduper();
        Set<TblColRef> dictCols = cubeDesc.getAllColumnsNeedDictionaryBuilt();
        for (int i = 0; i < allCols.size(); i++) {
            if (dictCols.contains(allCols.get(i)))
                dictColDeduper.setIsDictCol(i);
        }
    }
}
 
Example 19
Source File: SparkFactDistinct.java    From kylin-on-parquet-v2 with Apache License 2.0
@Override
public Iterator<Tuple2<String, Tuple3<Writable, Writable, String>>> call(
        Iterator<Tuple2<SelfDefineSortableKey, Text>> tuple2Iterator) throws Exception {
    if (!initialized) {
        synchronized (SparkFactDistinct.class) {
            if (!initialized) {
                init();
            }
        }
    }

    if (isStatistics) {
        // calculate hll
        calculateStatistics(tuple2Iterator);

        // output the hll info
        List<Long> allCuboids = Lists.newArrayList();
        allCuboids.addAll(cuboidHLLMap.keySet());
        Collections.sort(allCuboids);

        logMapperAndCuboidStatistics(allCuboids); // for human check
        outputStatistics(allCuboids, result);
    } else {
        // calculate dict/dimRange
        calculateColData(tuple2Iterator);

        // output dim range
        if (isDimensionCol) {
            outputDimRangeInfo(result);
        }

        // output dict object
        if (buildDictInReducer) {
            try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
                    .setAndUnsetThreadLocalConfig(kConfig)) {
                Dictionary<String> dict = builder.build();
                outputDict(col, dict, result);
            }
        }
    }

    return result.iterator();
}