Java Code Examples for org.apache.kylin.cube.CubeInstance

The following examples show how to use org.apache.kylin.cube.CubeInstance. These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example 1
Source Project: kylin   Source File: UpdateHTableHostCLI.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Keeps only the HTable names that are used as segment storage by the given cubes.
 *
 * @param allTableNames candidate HBase table names
 * @param cubeNames     cube names; entries may carry a trailing comma left over
 *                      from CLI comma-splitting, which is stripped
 * @return the subset of {@code allTableNames} referenced by the cubes' segments
 */
private static List<String> filterByCubes(List<String> allTableNames, List<String> cubeNames) {
    CubeManager cubeManager = CubeManager.getInstance(KylinConfig.getInstanceFromEnv());
    List<String> result = Lists.newArrayList();
    for (String c : cubeNames) {
        c = c.trim();
        if (c.endsWith(","))
            c = c.substring(0, c.length() - 1);

        CubeInstance cubeInstance = cubeManager.getCube(c);
        // getCube returns null for an unknown cube name; skip it instead of
        // failing with an NPE on getSegments().
        if (cubeInstance == null) {
            continue;
        }
        for (CubeSegment segment : cubeInstance.getSegments()) {
            String tableName = segment.getStorageLocationIdentifier();
            if (allTableNames.contains(tableName)) {
                result.add(tableName);
            }
        }
    }
    return result;
}
 
Example 2
Source Project: kylin-on-parquet-v2   Source File: FlinkCubingByLayer.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Flink rich-function setup: loads the Kylin config from HDFS and constructs the
 * {@code BaseCuboidBuilder} this task uses to encode base-cuboid rows.
 */
@Override
public void open(Configuration parameters) throws Exception {
    KylinConfig kylinConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
    try (KylinConfig.SetAndUnsetThreadLocalConfig ignored = KylinConfig
            .setAndUnsetThreadLocalConfig(kylinConfig)) {
        CubeInstance cube = CubeManager.getInstance(kylinConfig).getCube(cubeName);
        CubeDesc desc = cube.getDescriptor();
        CubeSegment segment = cube.getSegmentById(segmentId);
        CubeJoinedFlatTableEnrich flatTableEnrich = new CubeJoinedFlatTableEnrich(
                EngineFactory.getJoinedFlatTableDesc(segment), desc);
        long baseCuboidId = Cuboid.getBaseCuboidId(desc);
        Cuboid baseCuboid = Cuboid.findForMandatory(desc, baseCuboidId);
        baseCuboidBuilder = new BaseCuboidBuilder(kylinConfig, desc, segment, flatTableEnrich,
                AbstractRowKeyEncoder.createInstance(segment, baseCuboid),
                MeasureIngester.create(desc.getMeasures()), segment.buildDictionaryMap());
    }
}
 
Example 3
Source Project: Kylin   Source File: CubeService.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Checks whether {@code target} is among the cube realizations registered in the
 * named project. Returns {@code false} when the project does not exist.
 */
private boolean isCubeInProject(String projectName, CubeInstance target) {
    ProjectInstance project = getProjectManager().getProject(projectName);
    if (project == null) {
        return false;
    }
    for (RealizationEntry entry : project.getRealizationEntries()) {
        if (entry.getType() != RealizationType.CUBE) {
            continue; // only cube realizations are of interest here
        }
        CubeInstance cube = getCubeManager().getCube(entry.getRealization());
        assert cube != null;
        if (cube.equals(target)) {
            return true;
        }
    }
    return false;
}
 
Example 4
Source Project: kylin-on-parquet-v2   Source File: MetaDumpUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Collects the resource paths of all metadata the cube depends on: the cube
 * itself, its model and cube descriptors, its project, and every table (plus
 * table extension) referenced by the model. Insertion order is preserved.
 */
public static Set<String> collectCubeMetadata(CubeInstance cube) {
    TableMetadataManager tableManager = TableMetadataManager.getInstance(cube.getConfig());
    Set<String> resources = new LinkedHashSet<>();
    resources.add(cube.getResourcePath());
    resources.add(cube.getDescriptor().getModel().getResourcePath());
    resources.add(cube.getDescriptor().getResourcePath());
    resources.add(cube.getProjectInstance().getResourcePath());

    for (TableRef ref : cube.getDescriptor().getModel().getAllTables()) {
        TableDesc tableDesc = ref.getTableDesc();
        resources.add(tableDesc.getResourcePath());
        resources.add(tableManager.getTableExt(tableDesc).getResourcePath());
    }

    return resources;
}
 
Example 5
Source Project: kylin-on-parquet-v2   Source File: DeployUtil.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Generates test records for a streaming cube, pushes them into Kafka, and
 * appends the same rows in CSV form to the fact-table data used by H2.
 */
public static void prepareTestDataForStreamingCube(long startTime, long endTime, int numberOfRecords,
        String cubeName, StreamDataLoader streamDataLoader) throws IOException {
    CubeInstance cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube(cubeName);
    List<String> messages = StreamingTableDataGenerator.generate(numberOfRecords, startTime, endTime,
            cube.getRootFactTable(), cube.getProject());
    //load into kafka
    streamDataLoader.loadIntoKafka(messages);
    logger.info("Write {} messages into {}", messages.size(), streamDataLoader.toString());

    //csv data for H2 use
    TableRef factTable = cube.getModel().getRootFactTable();
    List<TblColRef> columns = Lists.newArrayList(factTable.getColumns());
    TimedJsonStreamParser parser = new TimedJsonStreamParser(columns, null);
    StringBuilder csv = new StringBuilder();
    for (String message : messages) {
        List<String> row = parser
                .parse(ByteBuffer.wrap(message.getBytes(StandardCharsets.UTF_8))).get(0).getData();
        csv.append(StringUtils.join(row, ","));
        csv.append(System.getProperty("line.separator"));
    }
    appendFactTableData(csv.toString(), cube.getRootFactTable());
}
 
Example 6
Source Project: Kylin   Source File: JobControllerTest.java    License: Apache License 2.0 6 votes vote down vote up
// Smoke test for the job controller: create a cube, list jobs, trigger a build,
// look the resulting job up by id, then clean up both the job and the cube.
@Test
public void testBasics() throws IOException, PersistentException {
    CubeDesc cubeDesc = cubeDescManager.getCubeDesc("test_kylin_cube_with_slr_left_join_desc");
    CubeInstance cube = cubeManager.createCube(CUBE_NAME, "DEFAULT", cubeDesc, "test");
    assertNotNull(cube);

    // Listing with an empty request must succeed even right after cube creation.
    JobListRequest jobRequest = new JobListRequest();
    Assert.assertNotNull(jobSchedulerController.list(jobRequest));

    // Trigger a full BUILD over the range [0, now).
    JobBuildRequest jobBuildRequest = new JobBuildRequest();
    jobBuildRequest.setBuildType("BUILD");
    jobBuildRequest.setStartTime(0L);
    jobBuildRequest.setEndTime(new Date().getTime());
    JobInstance job = cubeController.rebuild(CUBE_NAME, jobBuildRequest);

    // The scheduler must be able to find the newly created job by its id.
    Assert.assertNotNull(jobSchedulerController.get(job.getId()));
    executableDAO.deleteJob(job.getId());
    if (cubeManager.getCube(CUBE_NAME) != null) {
        cubeManager.dropCube(CUBE_NAME, false);
    }

    // jobSchedulerController.cancel(job.getId());
}
 
Example 7
Source Project: kylin   Source File: LookupSnapshotBuildJob.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Creates and initializes a lookup-snapshot build job for the given cube/table.
 * <p>
 * The cube must belong to exactly one project — otherwise the job's project name
 * would be ambiguous and a RuntimeException is thrown.
 */
private static LookupSnapshotBuildJob initJob(CubeInstance cube, String tableName, String submitter,
        KylinConfig kylinConfig) {
    List<ProjectInstance> projList = ProjectManager.getInstance(kylinConfig).findProjects(cube.getType(),
            cube.getName());
    if (projList == null || projList.isEmpty()) {
        throw new RuntimeException("Cannot find the project containing the cube " + cube.getName() + "!!!");
    } else if (projList.size() >= 2) {
        // Fixed typo in the original message ("does't" -> "doesn't").
        String msg = "Find more than one project containing the cube " + cube.getName()
                + ". It doesn't meet the uniqueness requirement!!! ";
        throw new RuntimeException(msg);
    }

    LookupSnapshotBuildJob result = new LookupSnapshotBuildJob();
    // Job display name carries a zone-prefixed timestamp rendered in the
    // server's configured time zone.
    SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
    format.setTimeZone(TimeZone.getTimeZone(kylinConfig.getTimeZone()));
    result.setDeployEnvName(kylinConfig.getDeployEnv());
    result.setProjectName(projList.get(0).getName());
    CubingExecutableUtil.setCubeName(cube.getName(), result.getParams());
    result.setName(JOB_TYPE + " CUBE - " + cube.getName() + " - " + " TABLE - " + tableName + " - "
            + format.format(new Date(System.currentTimeMillis())));
    result.setSubmitter(submitter);
    result.setNotifyList(cube.getDescriptor().getNotifyList());
    return result;
}
 
Example 8
Source Project: kylin-on-parquet-v2   Source File: BuildJobSubmitterTest.java    License: Apache License 2.0 6 votes vote down vote up
// Arranges the mocks shared by the checkSegmentBuildJobFromMetadata tests: one NEW
// segment on a stubbed cube instance, plus four cubing jobs covering the terminal
// states SUCCEED / DISCARDED / DISCARDED / ERROR.
void prepareTestCheckSegmentBuildJobFromMetadata() {
    CubeSegment cubeSegment = stubCubSegment(SegmentStatusEnum.NEW, 100L, 200L);
    CubeInstance cubeInstance = stubCubeInstance(cubeSegment);
    config = stubKylinConfig();
    when(cubeInstance.getConfig()).thenReturn(config);

    cubeManager = stubCubeManager(cubeInstance, false);

    // job id -> stubbed job; keys are the mock build-job ids defined on the test class
    Map<String, CubingJob> cubingJobMap = new HashMap<>();
    cubingJobMap.put(mockBuildJob1, stubCubingJob(ExecutableState.SUCCEED));
    cubingJobMap.put(mockBuildJob2, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob3, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob4, stubCubingJob(ExecutableState.ERROR));

    executableManager = stubExecutableManager(cubingJobMap);
    streamingCoordinator = stubStreamingCoordinator(config, cubeManager, executableManager);
    clusterManager = stubReceiverClusterManager(streamingCoordinator);
    when(streamingCoordinator.getClusterManager()).thenReturn(clusterManager);
}
 
Example 9
Source Project: kylin   Source File: QueryService.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Re-resolves the realization referenced by the OLAP context against current
 * metadata, preferring a hybrid instance of the same name over a plain cube.
 * Leaves the context untouched when no realization is set or nothing matches.
 */
private void resetRealizationInContext(OLAPContext olapContext) {
    IRealization current = olapContext.realization;
    if (current == null) {
        return;
    }
    KylinConfig config = getConfig();
    String name = current.getName();
    HybridInstance hybrid = HybridManager.getInstance(config).getHybridInstance(name);
    if (hybrid != null) {
        olapContext.realization = hybrid;
        return;
    }
    CubeInstance cube = CubeManager.getInstance(config).getCube(name);
    if (cube != null) {
        olapContext.realization = cube;
    }
}
 
Example 10
Source Project: kylin   Source File: StreamingTestBase.java    License: Apache License 2.0 6 votes vote down vote up
// Builds a Mockito stub of CubeInstance for streaming tests: a one-element
// building (NEW) segment list containing the given segment, a READY segment as
// the latest ready one, and an empty READY_PENDING (optimized) segment list.
CubeInstance stubCubeInstance(CubeSegment cubSegment) {
    CubeInstance cubeInstance = mock(CubeInstance.class);
    CubeSegment readySegment = stubCubSegment(SegmentStatusEnum.READY, 0L, 1L);
    // latestCopyForWrite returns the mock itself, so callers "update" it in place
    when(cubeInstance.latestCopyForWrite()).thenReturn(cubeInstance);
    @SuppressWarnings("unchecked")
    Segments<CubeSegment> segmentSegments = mock(Segments.class, RETURNS_DEEP_STUBS);

    Segments<CubeSegment> optimizedSegments = mock(Segments.class, RETURNS_DEEP_STUBS);

    when(segmentSegments.size()).thenReturn(1);
    when(cubeInstance.getBuildingSegments()).thenReturn(segmentSegments);
    when(cubeInstance.getName()).thenReturn(cubeName1);
    when(cubeInstance.getSegment(anyString(), Matchers.any())).thenReturn(cubSegment);

    when(optimizedSegments.size()).thenReturn(0);
    when(cubeInstance.getLatestReadySegment()).thenReturn(readySegment);
    when(cubeInstance.getSegments(SegmentStatusEnum.READY_PENDING)).thenReturn(optimizedSegments);
    when(cubeInstance.getSegments(SegmentStatusEnum.NEW)).thenReturn(segmentSegments);

    return cubeInstance;
}
 
Example 11
Source Project: kylin-on-parquet-v2   Source File: MergeCuboidMapper.java    License: Apache License 2.0 6 votes vote down vote up
// Mapper setup for cuboid merge: resolves the cube and the merged (target)
// segment from job configuration, then prepares a SegmentReEncoder that
// translates rows from the source segment's encoding to the merged segment's.
@Override
protected void doSetup(Context context) throws IOException, InterruptedException {
    super.bindCurrentConfiguration(context.getConfiguration());

    String cubeName = context.getConfiguration().get(BatchConstants.CFG_CUBE_NAME);
    String segmentID = context.getConfiguration().get(BatchConstants.CFG_CUBE_SEGMENT_ID);

    KylinConfig config = AbstractHadoopJob.loadKylinPropsAndMetadata();

    CubeManager cubeManager = CubeManager.getInstance(config);
    CubeInstance cube = cubeManager.getCube(cubeName);
    CubeDesc cubeDesc = cube.getDescriptor();
    CubeSegment mergedCubeSegment = cube.getSegmentById(segmentID);

    // decide which source segment this split belongs to, from the input file path
    FileSplit fileSplit = (FileSplit) context.getInputSplit();
    IMROutput2.IMRMergeOutputFormat outputFormat = MRUtil.getBatchMergeOutputSide2(mergedCubeSegment)
            .getOutputFormat();
    CubeSegment sourceCubeSegment = outputFormat.findSourceSegment(fileSplit, cube);
    reEncoder = new SegmentReEncoder(cubeDesc, sourceCubeSegment, mergedCubeSegment, config);
}
 
Example 12
Source Project: Kylin   Source File: CubeSizeEstimationCLI.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Estimates the total storage size of a cube by walking the cuboid spanning
 * tree from the base cuboid and summing per-cuboid space estimates.
 */
public static long estimatedCubeSize(String cubeName, long[] cardinality) {
    KylinConfig config = KylinConfig.getInstanceFromEnv();
    CubeInstance cubeInstance = CubeManager.getInstance(config).getCube(cubeName);
    CubeDesc cubeDesc = cubeInstance.getDescriptor();

    CuboidScheduler scheduler = new CuboidScheduler(cubeDesc);
    // LinkedList used as a stack of cuboid ids still to visit
    LinkedList<Long> pending = new LinkedList<Long>();
    pending.addFirst(Cuboid.getBaseCuboidId(cubeDesc));

    long totalSpace = 0;
    while (!pending.isEmpty()) {
        long cuboidID = pending.removeFirst();
        for (Long child : scheduler.getSpanningCuboid(cuboidID)) {
            pending.addFirst(child);
        }
        totalSpace += estimateCuboidSpace(cuboidID, cardinality, cubeDesc);
    }
    return totalSpace;
}
 
Example 13
Source Project: kylin   Source File: BuildJobSubmitterTest.java    License: Apache License 2.0 6 votes vote down vote up
// Test fixture setup: stubs a cube with a single NEW segment and an executable
// manager holding four cubing jobs in states SUCCEED, DISCARDED, DISCARDED and
// ERROR, then wires the streaming coordinator and cluster manager mocks together.
void prepareTestCheckSegmentBuildJobFromMetadata() {
    CubeSegment cubeSegment = stubCubSegment(SegmentStatusEnum.NEW, 100L, 200L);
    CubeInstance cubeInstance = stubCubeInstance(cubeSegment);
    config = stubKylinConfig();
    when(cubeInstance.getConfig()).thenReturn(config);

    cubeManager = stubCubeManager(cubeInstance, false);

    // keyed by the mock build-job ids declared on the test class
    Map<String, CubingJob> cubingJobMap = new HashMap<>();
    cubingJobMap.put(mockBuildJob1, stubCubingJob(ExecutableState.SUCCEED));
    cubingJobMap.put(mockBuildJob2, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob3, stubCubingJob(ExecutableState.DISCARDED));
    cubingJobMap.put(mockBuildJob4, stubCubingJob(ExecutableState.ERROR));

    executableManager = stubExecutableManager(cubingJobMap);
    streamingCoordinator = stubStreamingCoordinator(config, cubeManager, executableManager);
    clusterManager = stubReceiverClusterManager(streamingCoordinator);
    when(streamingCoordinator.getClusterManager()).thenReturn(clusterManager);
}
 
Example 14
Source Project: kylin-on-parquet-v2   Source File: DashboardService.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Builds the metric-store filter clauses for a dashboard category.
 * <p>
 * QUERY: successful queries only, optionally restricted to one realization.
 * JOB with a cube name: expands a hybrid into an IN-clause over its member
 * cubes, otherwise filters on the single cube name.
 */
private Map<String, String> getCubeFilterMap(CategoryEnum category, String cubeName) {
    HashMap<String, String> filterMap = new HashMap<>();

    if (category == CategoryEnum.QUERY) {
        // only queries that recorded no exception
        filterMap.put(QueryPropertyEnum.EXCEPTION.toString() + " = ?", "NULL");

        if (!Strings.isNullOrEmpty(cubeName)) {
            filterMap.put(QueryPropertyEnum.REALIZATION + " = ?", cubeName);
        }
    } else if (category == CategoryEnum.JOB && !Strings.isNullOrEmpty(cubeName)) {
        HybridInstance hybridInstance = getHybridManager().getHybridInstance(cubeName);
        if (null != hybridInstance) {
            // StringBuilder instead of StringBuffer: no concurrent mutation here.
            StringBuilder cubeNames = new StringBuilder();
            for (CubeInstance cube : getCubeByHybrid(hybridInstance)) {
                cubeNames.append(",'").append(cube.getName()).append("'");
            }
            // Guard against a hybrid with no member cubes — substring(1) on an
            // empty buffer would throw StringIndexOutOfBoundsException.
            if (cubeNames.length() > 0) {
                filterMap.put(JobPropertyEnum.CUBE.toString() + " IN (?)", cubeNames.substring(1));
            }
        } else {
            filterMap.put(JobPropertyEnum.CUBE.toString() + " = ?", cubeName);
        }
    }
    return filterMap;
}
 
Example 15
Source Project: kylin-on-parquet-v2   Source File: CubeController.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Deletes the named segment from a cube.
 *
 * @throws IOException
 */
@RequestMapping(value = "/{cubeName}/segs/{segmentName}", method = { RequestMethod.DELETE }, produces = {
        "application/json" })
@ResponseBody
public CubeInstance deleteSegment(@PathVariable String cubeName, @PathVariable String segmentName) {
    checkCubeExists(cubeName);
    CubeInstance cube = cubeService.getCubeManager().getCube(cubeName);

    // fail fast when the segment does not exist on this cube
    if (cube.getSegment(segmentName, null) == null) {
        throw new NotFoundException("Cannot find segment '" + segmentName + "'");
    }

    try {
        return cubeService.deleteSegment(cube, segmentName);
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw new InternalErrorException(e.getLocalizedMessage(), e);
    }
}
 
Example 16
Source Project: kylin   Source File: UpdateCubeInfoAfterBuildStep.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Persists ext-lookup snapshot paths produced by the build step. Global
 * snapshots are written to cube-level metadata (only when changed); non-global
 * ones are recorded on the segment. No-op when the job carries no snapshot info.
 */
private void saveExtSnapshotIfNeeded(CubeManager cubeManager, CubeInstance cube, CubeSegment segment)
        throws IOException {
    String extLookupSnapshotStr = this.getParam(BatchConstants.ARG_EXT_LOOKUP_SNAPSHOTS_INFO);
    if (extLookupSnapshotStr == null || extLookupSnapshotStr.isEmpty()) {
        return;
    }
    Map<String, String> snapshotsByTable = LookupMaterializeContext.parseLookupSnapshots(extLookupSnapshotStr);
    logger.info("update ext lookup snapshots:{}", snapshotsByTable);
    for (SnapshotTableDesc snapshotDesc : cube.getDescriptor().getSnapshotTableDescList()) {
        if (!snapshotDesc.isExtSnapshotTable()) {
            continue;
        }
        String tableName = snapshotDesc.getTableName();
        String newResPath = snapshotsByTable.get(tableName);
        if (newResPath == null || newResPath.isEmpty()) {
            continue;
        }
        if (snapshotDesc.isGlobal()) {
            // only rewrite cube-level metadata when the path actually changed
            if (!newResPath.equals(cube.getSnapshotResPath(tableName))) {
                cubeManager.updateCubeLookupSnapshot(cube, tableName, newResPath);
            }
        } else {
            segment.putSnapshotResPath(tableName, newResPath);
        }
    }
}
 
Example 17
Source Project: Kylin   Source File: CubeController.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * REST endpoint updating the cost weight of a cube; wraps any failure into an
 * InternalErrorException that carries both the context and the root cause.
 */
@RequestMapping(value = "/{cubeName}/cost", method = {RequestMethod.PUT})
@ResponseBody
@Metered(name = "updateCubeCost")
public CubeInstance updateCubeCost(@PathVariable String cubeName, @RequestParam(value = "cost") int cost) {
    try {
        return cubeService.updateCubeCost(cubeName, cost);
    } catch (Exception e) {
        final String errorMsg = "Failed to update cube cost: " + cubeName + " : " + cost;
        logger.error(errorMsg, e);
        throw new InternalErrorException(errorMsg + " Caused by: " + e.getMessage(), e);
    }
}
 
Example 18
Source Project: kylin   Source File: KylinHealthCheckJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Logs cubes whose descriptors are missing auto-merge or retention settings.
 */
private void checkCubeDescParams(List<CubeInstance> cubes) {
    for (CubeInstance cube : cubes) {
        CubeDesc desc = cube.getDescriptor();
        long[] autoMergeRanges = desc.getAutoMergeTimeRanges();
        boolean hasAutoMerge = autoMergeRanges != null && autoMergeRanges.length > 0;
        if (!hasAutoMerge) {
            logger.info("Cube: {} in project: {} with no auto merge params", cube.getName(), cube.getProject());
        }
        // long volatileRange = desc.getVolatileRange();
        if (desc.getRetentionRange() == 0) {
            logger.info("Cube: {} in project: {} with no retention params", cube.getName(), cube.getProject());
        }
        // queue params
    }
}
 
Example 19
Source Project: kylin   Source File: KylinHealthCheckJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reports cubes whose segment count reaches the configured warning threshold.
 * A negative threshold disables the check entirely.
 */
private void checkTooManySegments(List<CubeInstance> cubes) {
    reporter.log("## Checking too many segments of Cubes");
    int threshold = config.getWarningSegmentNum();
    if (threshold < 0) {
        return;
    }
    for (CubeInstance cube : cubes) {
        int segmentCount = cube.getSegments().size();
        if (segmentCount >= threshold) {
            reporter.log("Too many segments: {} for cube: {}, project: {}, please merge the segments",
                    segmentCount, cube.getName(), cube.getProject());
        }
    }
}
 
Example 20
public BatchOptimizeJobCheckpointBuilder(CubeInstance cube, String submitter) {
    this.cube = cube;
    this.submitter = submitter;

    Preconditions.checkNotNull(cube.getFirstSegment(), "Cube " + cube + " is empty!!!");
    this.outputSide = MRUtil.getBatchOptimizeOutputSide2(cube.getFirstSegment());
}
 
Example 21
Source Project: kylin-on-parquet-v2   Source File: SparkCubingByLayer.java    License: Apache License 2.0 5 votes vote down vote up
// Maps a flat-table row to a (rowkey, measure values) pair for the base cuboid.
// Double-checked locking on the static `initialized` flag ensures the expensive
// metadata load and builder construction run once per executor JVM.
@Override
public Tuple2<ByteArray, Object[]> call(String[] rowArray) throws Exception {
    if (initialized == false) {
        synchronized (SparkCubingByLayer.class) {
            if (initialized == false) {
                KylinConfig kConfig = AbstractHadoopJob.loadKylinConfigFromHdfs(conf, metaUrl);
                try (KylinConfig.SetAndUnsetThreadLocalConfig autoUnset = KylinConfig
                        .setAndUnsetThreadLocalConfig(kConfig)) {
                    CubeInstance cubeInstance = CubeManager.getInstance(kConfig).getCube(cubeName);
                    CubeDesc cubeDesc = cubeInstance.getDescriptor();
                    CubeSegment cubeSegment = cubeInstance.getSegmentById(segmentId);
                    CubeJoinedFlatTableEnrich interDesc = new CubeJoinedFlatTableEnrich(
                            EngineFactory.getJoinedFlatTableDesc(cubeSegment), cubeDesc);
                    long baseCuboidId = Cuboid.getBaseCuboidId(cubeDesc);
                    Cuboid baseCuboid = Cuboid.findForMandatory(cubeDesc, baseCuboidId);
                    baseCuboidBuilder = new BaseCuboidBuilder(kConfig, cubeDesc, cubeSegment, interDesc,
                            AbstractRowKeyEncoder.createInstance(cubeSegment, baseCuboid),
                            MeasureIngester.create(cubeDesc.getMeasures()), cubeSegment.buildDictionaryMap());
                    // flag is set only after the builder is fully constructed
                    initialized = true;
                }
            }
        }
    }
    // NOTE(review): the safety of this double-checked locking depends on
    // `initialized` being declared volatile — confirm its declaration elsewhere.
    baseCuboidBuilder.resetAggrs();
    byte[] rowKey = baseCuboidBuilder.buildKey(rowArray);
    Object[] result = baseCuboidBuilder.buildValueObjects(rowArray);
    return new Tuple2<>(new ByteArray(rowKey), result);
}
 
Example 22
Source Project: kylin   Source File: KylinHealthCheckJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reports ready cubes whose segment ranges contain holes (gaps between
 * consecutive segments).
 */
private void checkCubeHoles(List<CubeInstance> cubes) {
    reporter.log("## Checking holes of Cubes");
    for (CubeInstance cube : cubes) {
        if (!cube.isReady()) {
            continue; // only READY cubes are checked for holes
        }
        List<CubeSegment> holes = cubeManager.calculateHoles(cube.getName());
        if (!holes.isEmpty()) {
            reporter.log("{} holes in cube: {}, project: {}", holes.size(), cube.getName(), cube.getProject());
        }
    }
}
 
Example 23
Source Project: kylin   Source File: GTScanReqSerDerTest.java    License: Apache License 2.0 5 votes vote down vote up
// Round-trips a GTInfo built from a real cube's base cuboid through its
// serializer and checks the deserialized copy matches field by field.
@Test
public void testGTInfo() {
    CubeInstance cube = CubeManager.getInstance(KylinConfig.getInstanceFromEnv()).getCube("test_kylin_cube_with_slr_ready");
    CubeSegment segment = cube.getFirstSegment();

    Cuboid baseCuboid = Cuboid.getBaseCuboid(cube.getDescriptor());
    GTInfo info = CubeGridTable.newGTInfo(baseCuboid, new CubeDimEncMap(segment));
    GTInfo.serializer.serialize(info, buffer);
    buffer.flip(); // switch the shared buffer from write mode to read mode

    GTInfo sInfo = GTInfo.serializer.deserialize(buffer);
    this.compareTwoGTInfo(info, sInfo);
}
 
Example 24
Source Project: kylin   Source File: MigrationController.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * REST endpoint that files a migration request for a cube towards a target
 * host/project; any failure is surfaced to the client as a BadRequestException.
 */
@RequestMapping(value = "/{cubeName}/migrateRequest", method = { RequestMethod.PUT })
@ResponseBody
public String requestMigration(@PathVariable String cubeName, @RequestBody MigrationRequest request) {
    CubeInstance cube = getCubeInstance(cubeName);
    try {
        MigrationRuleSet.Context context = new MigrationRuleSet.Context(queryService, cube,
                getTargetHost(request.getTargetHost()), request.getProjectName());
        migrationService.requestMigration(cube, context);
    } catch (Exception e) {
        logger.error("Request migration failed.", e);
        throw new BadRequestException(e.getMessage());
    }
    return "ok";
}
 
Example 25
Source Project: kylin   Source File: HBaseJobSteps.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Returns the HDFS job working directories of all source segments that will be
 * merged into the target segment {@code seg}.
 */
public List<String> getMergingHDFSPaths() {
    final List<CubeSegment> mergingSegments = ((CubeInstance) seg.getRealization())
            .getMergingSegments(seg);
    // size() > 1 means at least two source segments; the original message said
    // "more than 2 segments", which did not match the check.
    Preconditions.checkState(mergingSegments.size() > 1,
            "there should be more than 1 segment to merge, target segment " + seg);
    final List<String> mergingHDFSPaths = Lists.newArrayList();
    for (CubeSegment merging : mergingSegments) {
        mergingHDFSPaths.add(getJobWorkingDir(merging.getLastBuildJobID()));
    }
    return mergingHDFSPaths;
}
 
Example 26
Source Project: kylin-on-parquet-v2   Source File: Coordinator.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Filters the known streaming cubes down to those whose cube instance is
 * currently in READY status.
 */
private List<StreamingCubeInfo> getEnableStreamingCubes() {
    List<StreamingCubeInfo> enabled = Lists.newArrayList();
    for (StreamingCubeInfo info : getStreamingCubes()) {
        CubeInstance cubeInstance = CubeManager.getInstance(KylinConfig.getInstanceFromEnv())
                .getCube(info.getCubeName());
        if (cubeInstance.getStatus() == RealizationStatusEnum.READY) {
            enabled.add(info);
        }
    }
    return enabled;
}
 
Example 27
Source Project: kylin   Source File: CuboidStatsReaderUtil.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reads per-cuboid statistics for the given cuboid ids from the cube.
 *
 * @return cuboid id -> statistic value, or an empty map when reading fails
 */
public static Map<Long, Long> readCuboidStatsFromCube(Set<Long> cuboidIds, CubeInstance cubeInstance) {
    Map<Long, Long> statisticsMerged = null;
    try {
        statisticsMerged = readCuboidStatsAndSizeFromCube(cuboidIds, cubeInstance).getFirst();
    } catch (IOException e) {
        // Pass the exception as the throwable argument so the stack trace is
        // logged, instead of concatenating its toString() into the message.
        logger.warn("Fail to read statistics for cube " + cubeInstance.getName(), e);
    }
    return statisticsMerged == null ? Collections.emptyMap() : statisticsMerged;
}
 
Example 28
Source Project: kylin-on-parquet-v2   Source File: BuildCubeWithEngine.java    License: Apache License 2.0 5 votes vote down vote up
// Produces a pseudo-random set of cuboid ids to "recommend" for optimization
// tests. Always includes the base cuboid and a fixed set of mandatory cuboids
// required by other ITs, then adds random ids (each kept with probability
// maxRatio, capped at maxNumber extras). Retries while the random pick equals
// the cube's current cuboid set, so the optimization is guaranteed to change it.
private Set<Long> mockRecommendCuboids(CubeInstance cubeInstance, double maxRatio, int maxNumber) {
    Preconditions.checkArgument(maxRatio > 0.0 && maxRatio < 1.0);
    Preconditions.checkArgument(maxNumber > 0);
    Set<Long> cuboidsRecommend;
    Random rnd = new Random();

    // add some mandatory cuboids which are for other unit test
    // - org.apache.kylin.query.ITCombinationTest.testLimitEnabled
    // - org.apache.kylin.query.ITFailfastQueryTest.testPartitionNotExceedMaxScanBytes
    // - org.apache.kylin.query.ITFailfastQueryTest.testQueryNotExceedMaxScanBytes
    List<Set<String>> mandatoryDimensionSetList = Lists.newLinkedList();
    mandatoryDimensionSetList.add(Sets.newHashSet("CAL_DT"));
    mandatoryDimensionSetList.add(Sets.newHashSet("seller_id", "CAL_DT"));
    mandatoryDimensionSetList.add(Sets.newHashSet("LSTG_FORMAT_NAME", "slr_segment_cd"));
    Set<Long> mandatoryCuboids = cubeInstance.getDescriptor().generateMandatoryCuboids(mandatoryDimensionSetList);

    CuboidScheduler cuboidScheduler = cubeInstance.getCuboidScheduler();
    Set<Long> cuboidsCurrent = cuboidScheduler.getAllCuboidIds();
    long baseCuboid = cuboidScheduler.getBaseCuboidId();
    do {
        cuboidsRecommend = Sets.newHashSet();
        cuboidsRecommend.add(baseCuboid);
        cuboidsRecommend.addAll(mandatoryCuboids);
        // every id below the base cuboid is a candidate
        for (long i = 1; i < baseCuboid; i++) {
            if (rnd.nextDouble() < maxRatio) { // keep roughly a maxRatio fraction of candidates
                cuboidsRecommend.add(i);
            }
            if (cuboidsRecommend.size() > maxNumber) {
                break;
            }
        }
    } while (cuboidsRecommend.equals(cuboidsCurrent));

    return cuboidsRecommend;
}
 
Example 29
Source Project: kylin   Source File: StreamingCubeSegment.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Reconstructs a streaming segment from an on-disk segment folder: the time
 * range is parsed from the folder name and the state restored from the store.
 */
public static StreamingCubeSegment parseSegment(CubeInstance cubeInstance, File segmentFolder,
        IStreamingSegmentStore segmentStore) {
    Pair<Long, Long> range = CubeSegment.parseSegmentName(segmentFolder.getName());
    StreamingCubeSegment segment = new StreamingCubeSegment(cubeInstance, segmentStore,
            range.getFirst(), range.getSecond());
    segment.saveState(segmentStore.getSegmentState());
    return segment;
}
 
Example 30
Source Project: kylin-on-parquet-v2   Source File: NSparkMergingJob.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates an NSparkMergingJob that merges the segments contained in the given
 * merged segment.
 *
 * @param mergedSegment the new target segment to merge into, which should
 *                      contain a couple of existing ready segments
 * @param submitter     user submitting the job
 * @param jobType       job type used for the display name and job metadata
 * @param jobId         id assigned to the new job
 */
public static NSparkMergingJob merge(CubeSegment mergedSegment, String submitter, JobTypeEnum jobType, String jobId) {
    CubeInstance cube = mergedSegment.getCubeInstance();

    NSparkMergingJob job = new NSparkMergingJob();
    // timestamp in the job name uses the cube's configured time zone
    SimpleDateFormat format = new SimpleDateFormat("z yyyy-MM-dd HH:mm:ss", Locale.ROOT);
    format.setTimeZone(TimeZone.getTimeZone(cube.getConfig().getTimeZone()));

    // display name: "<jobType> CUBE - <cube display name> - <segment name> - <timestamp>"
    StringBuilder builder = new StringBuilder();
    builder.append(jobType).append(" CUBE - ");
    builder.append(mergedSegment.getCubeInstance().getDisplayName()).append(" - ").append(mergedSegment.getName())
            .append(" - ");

    builder.append(format.format(new Date(System.currentTimeMillis())));
    job.setName(builder.toString());
    job.setId(jobId);
    job.setTargetSubject(mergedSegment.getModel().getUuid());
    job.setTargetSegments(Lists.newArrayList(String.valueOf(mergedSegment.getUuid())));
    job.setProject(mergedSegment.getProject());
    job.setJobType(jobType);
    job.setSubmitter(submitter);

    job.setParam(MetadataConstants.P_JOB_ID, jobId);
    job.setParam(MetadataConstants.P_PROJECT_NAME, cube.getProject());
    job.setParam(MetadataConstants.P_TARGET_MODEL, job.getTargetSubject());
    job.setParam(MetadataConstants.P_CUBE_ID, cube.getId());
    job.setParam(MetadataConstants.P_CUBE_NAME, cube.getName());
    job.setParam(MetadataConstants.P_SEGMENT_IDS, String.join(",", job.getTargetSegments()));
    job.setParam(CubingExecutableUtil.SEGMENT_ID, mergedSegment.getUuid());
    job.setParam(MetadataConstants.P_DATA_RANGE_START, mergedSegment.getSegRange().start.toString());
    job.setParam(MetadataConstants.P_DATA_RANGE_END, mergedSegment.getSegRange().end.toString());
    job.setParam(MetadataConstants.P_OUTPUT_META_URL, cube.getConfig().getMetadataUrl().toString());
    // NOTE(review): P_JOB_TYPE is hard-coded to INDEX_MERGE rather than using the
    // jobType parameter — confirm this is intentional for other job types.
    job.setParam(MetadataConstants.P_JOB_TYPE, String.valueOf(JobTypeEnum.INDEX_MERGE));

    JobStepFactory.addStep(job, JobStepType.RESOURCE_DETECT, cube);
    JobStepFactory.addStep(job, JobStepType.MERGING, cube);
    JobStepFactory.addStep(job, JobStepType.CLEAN_UP_AFTER_MERGE, cube);

    return job;
}