Java Code Examples for org.apache.kylin.common.util.Pair

The following examples show how to use org.apache.kylin.common.util.Pair. They are extracted from open source projects; the header above each example names the originating project, source file, and license.
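All of the examples below rely on the same small surface of Pair: construction through the two-argument constructor or the Pair.newPair factory, and positional access through getFirst() and getSecond(). A minimal sketch of that surface (the PairDemo class and the literal values are illustrative only):

import org.apache.kylin.common.util.Pair;

public class PairDemo {
    public static void main(String[] args) {
        // Two equivalent ways to build a pair, both used throughout the examples below
        Pair<String, Integer> byConstructor = new Pair<>("host", 7070);
        Pair<String, Integer> byFactory = Pair.newPair("host", 7070);

        // Elements are read back positionally
        System.out.println(byConstructor.getFirst() + ":" + byConstructor.getSecond()); // host:7070
        System.out.println(byFactory.getFirst() + ":" + byFactory.getSecond());         // host:7070
    }
}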
Example 1
Source Project: kylin-on-parquet-v2   Source File: RestClient.java    License: Apache License 2.0
public Pair<String, String> getJobServerWithState() throws IOException {
    String url = baseUrl + "/service_discovery/state/is_active_job_node";
    HttpGet get = new HttpGet(url);
    HttpResponse response = null;
    try {
        response = client.execute(get);
        String msg = EntityUtils.toString(response.getEntity());

        if (response.getStatusLine().getStatusCode() != 200) {
            throw new IOException(INVALID_RESPONSE + response.getStatusLine().getStatusCode()
                    + " with getting job server state  " + url + "\n" + msg);
        }
        return Pair.newPair(host + ":" + port, msg);
    } finally {
        cleanup(get, response);
    }
}
 
Example 2
Source Project: kylin-on-parquet-v2   Source File: CuboidStatsReaderUtil.java    License: Apache License 2.0
public static Pair<Map<Long, Long>, Long> readCuboidStatsWithSourceFromSegment(Set<Long> cuboidIds,
        CubeSegment cubeSegment) throws IOException {
    if (cubeSegment == null) {
        logger.warn("The cube segment can not be " + null);
        return null;
    }

    CubeStatsReader cubeStatsReader = new CubeStatsReader(cubeSegment, null, cubeSegment.getConfig());
    if (cubeStatsReader.getCuboidRowEstimatesHLL() == null
            || cubeStatsReader.getCuboidRowEstimatesHLL().isEmpty()) {
        logger.info("Cuboid Statistics is not enabled.");
        return null;
    }

    Map<Long, Long> cuboidsWithStatsAll = cubeStatsReader.getCuboidRowEstimatesHLL();
    Map<Long, Long> cuboidsWithStats = Maps.newHashMapWithExpectedSize(cuboidIds.size());
    for (Long cuboid : cuboidIds) {
        Long rowEstimate = cuboidsWithStatsAll.get(cuboid);
        if (rowEstimate == null) {
            logger.warn("Cannot get the row count stats for cuboid " + cuboid);
        } else {
            cuboidsWithStats.put(cuboid, rowEstimate);
        }
    }
    return new Pair<>(cuboidsWithStats, cubeStatsReader.sourceRowCount);
}
 
Example 3
Source Project: kylin   Source File: SegmentReEncoder.java    License: Apache License 2.0
/**
 * Re-encodes a row with both dimension and measure in encoded (Text) format.
 * @param key the encoded row key
 * @param value the encoded measure values
 * @return the re-encoded key/value pair
 * @throws IOException if measure decoding or re-encoding fails
 */
public Pair<Text, Text> reEncode(Text key, Text value) throws IOException {
    if (!initialized) {
        throw new IllegalStateException("Not initialized");
    }
    Object[] measureObjs = new Object[measureDescs.size()];
    // re-encode measures if dictionary is used
    if (dictMeasures.size() > 0) {
        codec.decode(ByteBuffer.wrap(value.getBytes(), 0, value.getLength()), measureObjs);
        for (Pair<Integer, MeasureIngester> pair : dictMeasures) {
            int i = pair.getFirst();
            MeasureIngester ingester = pair.getSecond();
            measureObjs[i] = ingester.reEncodeDictionary(measureObjs[i], measureDescs.get(i), oldDicts, newDicts);
        }

        ByteBuffer valueBuf = codec.encode(measureObjs);
        textValue.set(valueBuf.array(), 0, valueBuf.position());
        return Pair.newPair(processKey(key), textValue);
    } else {
        return Pair.newPair(processKey(key), value);
    }
}
 
Example 4
Source Project: kylin-on-parquet-v2   Source File: QueryService.java    License: Apache License 2.0
/**
 * @param correctedSql the rewritten SQL to execute
 * @param sqlRequest the original request, carrying project and statement attributes
 * @param conn the JDBC connection, owned and closed by the caller
 * @return the SQL response, built either from the JDBC result set or from the pushdown engine
 * @throws Exception if both normal execution and query pushdown fail
 */
private SQLResponse executeRequest(String correctedSql, SQLRequest sqlRequest, Connection conn) throws Exception {
    Statement stat = null;
    ResultSet resultSet = null;
    boolean isPushDown = false;

    Pair<List<List<String>>, List<SelectedColumnMeta>> r = null;
    try {
        stat = conn.createStatement();
        processStatementAttr(stat, sqlRequest);
        resultSet = stat.executeQuery(correctedSql);

        r = createResponseFromResultSet(resultSet);

    } catch (SQLException sqlException) {
        r = pushDownQuery(sqlRequest, correctedSql, conn, sqlException);
        if (r == null)
            throw sqlException;

        isPushDown = true;
    } finally {
        close(resultSet, stat, null); //conn is passed in, not my duty to close
    }

    return buildSqlResponse(sqlRequest.getProject(), isPushDown, r.getFirst(), r.getSecond());
}
 
Example 5
Source Project: kylin-on-parquet-v2   Source File: TableService.java    License: Apache License 2.0
public List<Pair<TableDesc, TableExtDesc>> extractHiveTableMeta(String[] tables, String project) throws Exception { // de-dup
    SetMultimap<String, String> db2tables = LinkedHashMultimap.create();
    for (String fullTableName : tables) {
        String[] parts = HadoopUtil.parseHiveTableName(fullTableName);
        db2tables.put(parts[0], parts[1]);
    }

    // load all tables first
    List<Pair<TableDesc, TableExtDesc>> allMeta = Lists.newArrayList();
    ProjectInstance projectInstance = getProjectManager().getProject(project);
    ISourceMetadataExplorer explr = SourceManager.getSource(projectInstance).getSourceMetadataExplorer();
    for (Map.Entry<String, String> entry : db2tables.entries()) {
        Pair<TableDesc, TableExtDesc> pair = explr.loadTableMetadata(entry.getKey(), entry.getValue(), project);
        TableDesc tableDesc = pair.getFirst();
        Preconditions.checkState(tableDesc.getDatabase().equals(entry.getKey().toUpperCase(Locale.ROOT)));
        Preconditions.checkState(tableDesc.getName().equals(entry.getValue().toUpperCase(Locale.ROOT)));
        Preconditions.checkState(tableDesc.getIdentity()
                .equals(entry.getKey().toUpperCase(Locale.ROOT) + "." + entry.getValue().toUpperCase(Locale.ROOT)));
        TableExtDesc extDesc = pair.getSecond();
        Preconditions.checkState(tableDesc.getIdentity().equals(extDesc.getIdentity()));
        allMeta.add(pair);
    }
    return allMeta;
}
 
Example 6
public static Map<String, Integer> getValidEncodings() {
    if (factoryMap == null)
        initFactoryMap();

    Map<String, Integer> result = Maps.newTreeMap();
    for (Pair<String, Integer> p : factoryMap.keySet()) {
        if (result.containsKey(p.getFirst())) {
            if (result.get(p.getFirst()) > p.getSecond()) {
                continue; // skip, a larger version of this encoding is already recorded
            }
        }

        result.put(p.getFirst(), p.getSecond());
    }
    result.put(DictionaryDimEnc.ENCODING_NAME, 1);
    return result;
}
 
Example 7
Source Project: kylin-on-parquet-v2   Source File: CubeHBaseEndpointRPC.java    License: Apache License 2.0
@SuppressWarnings("unchecked")
private List<Pair<byte[], byte[]>> getEPKeyRanges(short baseShard, short shardNum, int totalShards) {
    if (shardNum == 0) {
        return Lists.newArrayList();
    }

    if (shardNum == totalShards) {
        //all shards
        return Lists.newArrayList(
                Pair.newPair(getByteArrayForShort((short) 0), getByteArrayForShort((short) (shardNum - 1))));
    } else if (baseShard + shardNum <= totalShards) {
        //endpoint end key is inclusive, so no need to append 0 or anything
        return Lists.newArrayList(Pair.newPair(getByteArrayForShort(baseShard),
                getByteArrayForShort((short) (baseShard + shardNum - 1))));
    } else {
        //wrap-around case: e.g. with 5 shards (0,1,2,3,4), baseShard=4 and shardNum=2 covers shards 4 and 0
        return Lists.newArrayList(
                Pair.newPair(getByteArrayForShort(baseShard), getByteArrayForShort((short) (totalShards - 1))), //
                Pair.newPair(getByteArrayForShort((short) 0),
                        getByteArrayForShort((short) (baseShard + shardNum - totalShards - 1))));
    }
}
 
Example 8
protected static Pair<KeyHook, byte[][]> getKeyValuePair(int nSplit, String keyS, byte[] valueB) {
    KeyHook keyHook;
    byte[][] splitValueB = null;
    if (nSplit > 1) {
        if (logger.isDebugEnabled()) {
            logger.debug("Enable chunking for putting large cached object values, chunk size = " + nSplit
                    + ", original value bytes size = " + valueB.length);
        }
        String[] chunkKeySs = new String[nSplit];
        for (int i = 0; i < nSplit; i++) {
            chunkKeySs[i] = keyS + i;
        }
        keyHook = new KeyHook(chunkKeySs, null);
        splitValueB = splitBytes(valueB, nSplit);
    } else {
        if (logger.isDebugEnabled()) {
            logger.debug(
                    "Chunking not enabled, put the original value bytes to keyhook directly, original value bytes size = "
                            + valueB.length);
        }
        keyHook = new KeyHook(null, valueB);
    }

    return new Pair<>(keyHook, splitValueB);
}
 
Example 9
Source Project: kylin-on-parquet-v2   Source File: QueryService.java    License: Apache License 2.0
public SQLResponse update(SQLRequest sqlRequest) throws Exception {
    // non-select operations are only supported when pushdown is enabled
    logger.debug("Query pushdown enabled, redirect the non-select query to pushdown engine.");
    Connection conn = null;
    try {
        conn = QueryConnection.getConnection(sqlRequest.getProject());
        Pair<List<List<String>>, List<SelectedColumnMeta>> r = PushDownUtil.tryPushDownNonSelectQuery(
                sqlRequest.getProject(), sqlRequest.getSql(), conn.getSchema(), BackdoorToggles.getPrepareOnly());

        List<SelectedColumnMeta> columnMetas = Lists.newArrayList();
        columnMetas.add(new SelectedColumnMeta(false, false, false, false, 1, false, Integer.MAX_VALUE, "c0", "c0",
                null, null, null, Integer.MAX_VALUE, 128, 1, "char", false, false, false));

        return buildSqlResponse(sqlRequest.getProject(), true, r.getFirst(), columnMetas);

    } catch (Exception e) {
        logger.info("pushdown engine failed to finish current non-select query");
        throw e;
    } finally {
        close(null, null, conn);
    }
}
 
Example 10
Source Project: kylin   Source File: CubeSegment.java    License: Apache License 2.0
public static Pair<Long, Long> parseSegmentName(String segmentName) {
    if ("FULL".equals(segmentName)) {
        return new Pair<>(0L, 0L);
    }
    String[] startEnd = segmentName.split("_");
    if (startEnd.length != 2) {
        throw new IllegalArgumentException("the segmentName is illegal: " + segmentName);
    }
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT);
    dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));

    try {
        long dateRangeStart = dateFormat.parse(startEnd[0]).getTime();
        long dateRangeEnd = dateFormat.parse(startEnd[1]).getTime();
        return new Pair<>(dateRangeStart, dateRangeEnd);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Invalid segmentName for CubeSegment, segmentName = " + segmentName);
    }
}
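For reference, segment names encode a GMT time range as yyyyMMddHHmmss_yyyyMMddHHmmss, and the literal name FULL maps to the pair (0, 0). A short illustrative call (the segment name is made up; the printed values are epoch milliseconds):

// Hypothetical usage of parseSegmentName above
Pair<Long, Long> range = CubeSegment.parseSegmentName("20120101000000_20130101000000");
System.out.println(range.getFirst() + " - " + range.getSecond()); // GMT epoch millis of both boundaries

Pair<Long, Long> full = CubeSegment.parseSegmentName("FULL"); // (0L, 0L)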
 
Example 11
Source Project: kylin   Source File: Segments.java    License: Apache License 2.0
public Pair<Boolean, Boolean> fitInSegments(ISegment newOne) {
    if (this.isEmpty())
        return null;

    ISegment first = this.get(0);
    ISegment last = this.get(this.size() - 1);
    Endpoint start = newOne.getSegRange().start;
    Endpoint end = newOne.getSegRange().end;
    boolean startFit = false;
    boolean endFit = false;
    for (ISegment seg : this) {
        if (seg == newOne)
            continue;
        startFit = startFit || (start.equals(seg.getSegRange().start) || start.equals(seg.getSegRange().end));
        endFit = endFit || (end.equals(seg.getSegRange().start) || end.equals(seg.getSegRange().end));
    }
    if (!startFit && endFit && newOne == first)
        startFit = true;
    if (!endFit && startFit && newOne == last)
        endFit = true;

    return Pair.newPair(startFit, endFit);
}
 
Example 12
Source Project: kylin   Source File: TupleExpressionSerializerTest.java    License: Apache License 2.0
@Test
public void testSerialization() {
    TblColRef colD = TblColRef.mockup(t, 1, "C1", "string");
    TblColRef colM = TblColRef.mockup(t, 2, "C2", "decimal");
    BigDecimal value = BigDecimal.valueOf(10L);

    ColumnTupleFilter colFilter = new ColumnTupleFilter(colD);
    ConstantTupleFilter constFilter = new ConstantTupleFilter("col");
    CompareTupleFilter compareFilter = new CompareTupleFilter(TupleFilter.FilterOperatorEnum.EQ);
    compareFilter.addChild(colFilter);
    compareFilter.addChild(constFilter);

    ColumnTupleExpression colTuple = new ColumnTupleExpression(colM);
    ConstantTupleExpression constTuple = new ConstantTupleExpression(value);

    Pair<TupleFilter, TupleExpression> whenEntry = new Pair<>(compareFilter, colTuple);
    CaseTupleExpression caseTuple = new CaseTupleExpression(Lists.newArrayList(whenEntry), constTuple);

    byte[] result = TupleExpressionSerializer.serialize(caseTuple, StringCodeSystem.INSTANCE);

    TupleExpression desTuple = TupleExpressionSerializer.deserialize(result, StringCodeSystem.INSTANCE);
    assertEquals(caseTuple, desTuple);
}
 
Example 13
Source Project: kylin   Source File: SqlNodeConverter.java    License: Apache License 2.0
private SqlNode convertSqlCall(SqlCall sqlCall) {
    SqlOperator operator = sqlCall.getOperator();
    if (operator != null) {
        Pair<SqlNode, SqlNode> matched = convMaster.matchSqlFunc(sqlCall);

        if (matched != null) {
            Preconditions.checkState(matched.getFirst() instanceof SqlCall);
            SqlCall sourceTmpl = (SqlCall) matched.getFirst();

            Preconditions.checkState(sourceTmpl.operandCount() == sqlCall.operandCount());
            SqlNode targetTmpl = matched.getSecond();

            boolean isWindowCall = sourceTmpl.getOperator() instanceof SqlOverOperator;
            SqlParamsFinder sqlParamsFinder = SqlParamsFinder.newInstance(sourceTmpl, sqlCall, isWindowCall);
            return targetTmpl.accept(new SqlFuncFiller(sqlParamsFinder.getParamNodes(), isWindowCall));
        }
    }
    return null;
}
 
Example 14
Source Project: kylin-on-parquet-v2   Source File: LookupTable.java    License: Apache License 2.0
public Pair<T, T> mapRange(String col, T beginValue, T endValue, String returnCol) {
    int colIdx = tableDesc.findColumnByName(col).getZeroBasedIndex();
    int returnIdx = tableDesc.findColumnByName(returnCol).getZeroBasedIndex();
    Comparator<T> colComp = getComparator(colIdx);
    Comparator<T> returnComp = getComparator(returnIdx);

    T returnBegin = null;
    T returnEnd = null;
    for (T[] row : data.values()) {
        if (between(beginValue, row[colIdx], endValue, colComp)) {
            T returnValue = row[returnIdx];
            if (returnBegin == null || returnComp.compare(returnValue, returnBegin) < 0) {
                returnBegin = returnValue;
            }
            if (returnEnd == null || returnComp.compare(returnValue, returnEnd) > 0) {
                returnEnd = returnValue;
            }
        }
    }
    if (returnBegin == null && returnEnd == null)
        return null;
    else
        return Pair.newPair(returnBegin, returnEnd);
}
 
Example 15
Source Project: kylin   Source File: TableService.java    License: Apache License 2.0
public void checkHiveTableCompatibility(String prj, TableDesc tableDesc) throws Exception {
    Preconditions.checkNotNull(tableDesc.getDatabase());
    Preconditions.checkNotNull(tableDesc.getName());

    String database = tableDesc.getDatabase().toUpperCase(Locale.ROOT);
    String tableName = tableDesc.getName().toUpperCase(Locale.ROOT);
    ProjectInstance projectInstance = getProjectManager().getProject(prj);
    ISourceMetadataExplorer explr = SourceManager.getSource(projectInstance).getSourceMetadataExplorer();

    TableDesc hiveTableDesc;
    try {
        Pair<TableDesc, TableExtDesc> pair = explr.loadTableMetadata(database, tableName, prj);
        hiveTableDesc = pair.getFirst();
    } catch (Exception e) {
        logger.error("Fail to get metadata for hive table {} due to ", tableDesc.getIdentity(), e);
        throw new RuntimeException("Fail to get metadata for hive table " + tableDesc.getIdentity());
    }

    TableSchemaUpdateChecker.CheckResult result = getSchemaUpdateChecker().allowMigrate(tableDesc, hiveTableDesc);
    result.raiseExceptionWhenInvalid();
}
 
Example 16
Source Project: kylin   Source File: CubeHBaseRPC.java    License: Apache License 2.0
private RawScan preparedHBaseScan(GTRecord pkStart, GTRecord pkEnd, List<GTRecord> fuzzyKeys, ImmutableBitSet selectedColBlocks) {
    final List<Pair<byte[], byte[]>> selectedColumns = makeHBaseColumns(selectedColBlocks);

    LazyRowKeyEncoder encoder = new LazyRowKeyEncoder(cubeSeg, cuboid);
    byte[] start = encoder.createBuf();
    byte[] end = encoder.createBuf();

    encoder.setBlankByte(RowConstants.ROWKEY_LOWER_BYTE);
    encoder.encode(pkStart, pkStart.getInfo().getPrimaryKey(), start);

    encoder.setBlankByte(RowConstants.ROWKEY_UPPER_BYTE);
    encoder.encode(pkEnd, pkEnd.getInfo().getPrimaryKey(), end);
    byte[] temp = new byte[end.length + 1];//append extra 0 to the end key to make it inclusive while scanning
    System.arraycopy(end, 0, temp, 0, end.length);
    end = temp;

    List<Pair<byte[], byte[]>> hbaseFuzzyKeys = translateFuzzyKeys(fuzzyKeys);

    KylinConfig config = cubeSeg.getCubeDesc().getConfig();
    int hbaseCaching = config.getHBaseScanCacheRows();
    int hbaseMaxResultSize = config.getHBaseScanMaxResultSize();
    //        if (isMemoryHungry(selectedColBlocks))
    //            hbaseCaching /= 10;

    return new RawScan(start, end, selectedColumns, hbaseFuzzyKeys, hbaseCaching, hbaseMaxResultSize);
}
 
Example 17
Source Project: kylin   Source File: RawScan.java    License: Apache License 2.0
@Override
public void serialize(RawScan value, ByteBuffer out) {
    BytesUtil.writeByteArray(value.startKey, out);
    BytesUtil.writeByteArray(value.endKey, out);
    BytesUtil.writeVInt(value.hbaseColumns.size(), out);
    for (Pair<byte[], byte[]> hbaseColumn : value.hbaseColumns) {
        BytesUtil.writeByteArray(hbaseColumn.getFirst(), out);
        BytesUtil.writeByteArray(hbaseColumn.getSecond(), out);
    }
    BytesUtil.writeVInt(value.fuzzyKeys.size(), out);
    for (Pair<byte[], byte[]> fuzzyKey : value.fuzzyKeys) {
        BytesUtil.writeByteArray(fuzzyKey.getFirst(), out);
        BytesUtil.writeByteArray(fuzzyKey.getSecond(), out);
    }
    BytesUtil.writeVInt(value.hbaseCaching, out);
    BytesUtil.writeVInt(value.hbaseMaxResultSize, out);
}
 
Example 18
Source Project: kylin   Source File: InvertIndexSearcher.java    License: Apache License 2.0
public InvertIndexSearcher(CuboidMetaInfo cuboidMetaInfo, TblColRef[] cols, ByteBuffer idxBuffer)
        throws IOException {
    Map<String, Pair<Integer, Integer>> columnMetas = Maps.newHashMap();
    for (DimensionMetaInfo dimensionInfo : cuboidMetaInfo.getDimensionsInfo()) {
        for (TblColRef col : cols) {
            if (dimensionInfo.getName().equals(col.getName())) {
                columnMetas.put(col.getName(), new Pair<>(
                        dimensionInfo.getStartOffset() + dimensionInfo.getDataLength(),
                        dimensionInfo.getIndexLength()));
            }
        }
    }
    for (Map.Entry<String, Pair<Integer, Integer>> columnMeta : columnMetas.entrySet()) {
        String colName = columnMeta.getKey();
        Pair<Integer, Integer> positionInfo = columnMeta.getValue();
        int offset = positionInfo.getFirst();
        int length = positionInfo.getSecond();
        //start offset of this column
        ByteBuffer colIdxBuf = idxBuffer.asReadOnlyBuffer();
        colIdxBuf.position(offset);
        colIdxBuf.limit(offset + length);
        ColInvertIndexSearcher colIndexSearcher = ColInvertIndexSearcher.load(colIdxBuf);
        colIndexSearchers.put(colName, colIndexSearcher);
    }
}
 
Example 19
Source Project: kylin   Source File: GTAggregateScanner.java    License: Apache License 2.0
private void enqueueFromDump(int index) {
    if (dumpIterators.get(index) != null && dumpIterators.get(index).hasNext()) {
        Pair<byte[], byte[]> pair = dumpIterators.get(index).next();
        minHeap.offer(new SimpleEntry(pair.getFirst(), index));
        Object[] metricValues = new Object[metrics.trueBitCount()];
        measureCodec.decode(ByteBuffer.wrap(pair.getSecond()), metricValues);
        dumpCurrentValues.set(index, metricValues);
    }
}
 
Example 20
public Hits(int nRows, double hitRatio, double indexRatio) {
    Random rand = new Random();

    hitsForRowScanWithIndex = new boolean[nRows];
    hitsForRowScanNoIndex = new boolean[nRows];

    // for row scan
    int blockSize = (int) (1.0 / indexRatio);
    int nBlocks = nRows / blockSize;

    for (int i = 0; i < nBlocks; i++) {
        if (rand.nextDouble() < hitRatio) {
            for (int j = 0; j < blockSize; j++) {
                hitsForRowScanNoIndex[i * blockSize + j] = true;
                hitsForRowScanWithIndex[i * blockSize + j] = true;
            }
        } else {
            // miss: the scan without an index still touches the first row of the block
            hitsForRowScanNoIndex[i * blockSize] = true;
        }
    }

    hitsForColumnScan = Lists.newArrayList();

    // for column scan
    int nColumns = 20;
    int logicRows = nRows / nColumns;
    for (int i = 0; i < nColumns; i++) {
        if (rand.nextDouble() < hitRatio) {
            hitsForColumnScan.add(Pair.newPair(i * logicRows, (i + 1) * logicRows));
        }
    }
}
 
Example 21
Source Project: kylin   Source File: QueryGeneratorCLI.java    License: Apache License 2.0
public Pair<List<String>, double[]> execute(String cubeName, int sizeOfQueryList, String outputPath)
        throws Exception {
    this.outputPath = outputPath;
    this.sizeOfQueryList = sizeOfQueryList;

    return run(cubeName, true);
}
 
Example 22
Source Project: kylin   Source File: AbstractExecutable.java    License: Apache License 2.0
protected void sendMail(Pair<String, String> email) {
    try {
        List<String> users = getAllNofifyUsers(config);
        if (users.isEmpty()) {
            logger.debug(NO_NEED_TO_SEND_EMAIL_USER_LIST_IS_EMPTY);
            return;
        }
        doSendMail(config, users, email);
    } catch (Exception e) {
        logger.error("error send email", e);
    }
}
 
Example 23
Source Project: kylin-on-parquet-v2   Source File: QueryService.java    License: Apache License 2.0
private Pair<List<List<String>>, List<SelectedColumnMeta>> createResponseFromResultSet(ResultSet resultSet)
        throws Exception {
    List<List<String>> results = Lists.newArrayList();
    List<SelectedColumnMeta> columnMetas = Lists.newArrayList();

    ResultSetMetaData metaData = resultSet.getMetaData();
    int columnCount = metaData.getColumnCount();

    // Fill in selected column meta
    for (int i = 1; i <= columnCount; ++i) {
        columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i), metaData.isSigned(i),
                metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i), metaData.getColumnName(i),
                metaData.getSchemaName(i), metaData.getCatalogName(i), metaData.getTableName(i),
                metaData.getPrecision(i), metaData.getScale(i), metaData.getColumnType(i),
                metaData.getColumnTypeName(i), metaData.isReadOnly(i), metaData.isWritable(i),
                metaData.isDefinitelyWritable(i)));
    }

    // fill in results
    while (resultSet.next()) {
        List<String> oneRow = Lists.newArrayListWithCapacity(columnCount);
        for (int i = 0; i < columnCount; i++) {
            oneRow.add(resultSet.getString(i + 1));
        }

        results.add(oneRow);
    }

    return new Pair<>(results, columnMetas);
}
 
Example 24
public static Pair<String, String> getTableNameSplits(String tableName) {
    if (Strings.isNullOrEmpty(tableName)) {
        return null;
    }

    String[] splits = tableName.split(Pattern.quote("."));
    int i = 0;
    String database = splits.length == 1 ? KYLIN_PREFIX : splits[i++];
    String tableNameOnly = splits[i];
    return new Pair<>(database, tableNameOnly);
}
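A quick illustration of the split behavior; KYLIN_PREFIX is whatever default-database constant the enclosing class defines, and the table names are made up:

// Hypothetical usage of getTableNameSplits above
Pair<String, String> qualified = getTableNameSplits("mydb.mytable");   // ("mydb", "mytable")
Pair<String, String> unqualified = getTableNameSplits("mytable");      // (KYLIN_PREFIX, "mytable")
Pair<String, String> empty = getTableNameSplits("");                   // null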
 
Example 25
Source Project: kylin-on-parquet-v2   Source File: CuboidStatsUtilTest.java    License: Apache License 2.0
private Map<Long, Map<Long, Pair<Long, Long>>> simulateRollingUpCount() {
    Map<Long, Map<Long, Pair<Long, Long>>> rollingUpCountMap = Maps.newLinkedHashMap();

    rollingUpCountMap.put(239L, new HashMap<Long, Pair<Long, Long>>() {
        {
            put(255L, new Pair<>(990L, 10L));
        }
    });

    rollingUpCountMap.put(178L, new HashMap<Long, Pair<Long, Long>>() {
        {
            put(255L, new Pair<>(4999L, 1L));
        }
    });

    rollingUpCountMap.put(187L, new HashMap<Long, Pair<Long, Long>>() {
        {
            put(251L, new Pair<>(3000L, 1000L));
        }
    });

    rollingUpCountMap.put(0L, new HashMap<Long, Pair<Long, Long>>() {
        {
            put(2L, new Pair<>(19L, 1L));
        }
    });

    return rollingUpCountMap;
}
 
Example 26
Source Project: kylin-on-parquet-v2   Source File: CuboidRecommenderUtil.java    License: Apache License 2.0
/** For future segment-level recommendation */
public static Map<Long, Long> getRecommendCuboidList(CubeSegment segment, Map<Long, Long> hitFrequencyMap,
        Map<Long, Map<Long, Pair<Long, Long>>> rollingUpCountSourceMap, boolean ifForceRecommend)
        throws IOException {
    if (segment == null) {
        return null;
    }

    CubeStatsReader cubeStatsReader = new CubeStatsReader(segment, null, segment.getConfig());
    if (cubeStatsReader.getCuboidRowEstimatesHLL() == null
            || cubeStatsReader.getCuboidRowEstimatesHLL().isEmpty()) {
        logger.info("Cuboid Statistics is not enabled.");
        return null;
    }
    CubeInstance cube = segment.getCubeInstance();
    long baseCuboid = cube.getCuboidScheduler().getBaseCuboidId();
    if (cubeStatsReader.getCuboidRowEstimatesHLL().get(baseCuboid) == null
            || cubeStatsReader.getCuboidRowEstimatesHLL().get(baseCuboid) == 0L) {
        logger.info(BASE_CUBOID_COUNT_IN_CUBOID_STATISTICS_IS_ZERO);
        return null;
    }

    String key = cube.getName() + "-" + segment.getName();
    CuboidStats cuboidStats = new CuboidStats.Builder(key, baseCuboid, cubeStatsReader.getCuboidRowEstimatesHLL(),
            cubeStatsReader.getCuboidSizeMap()).setHitFrequencyMap(hitFrequencyMap)
                    .setRollingUpCountSourceMap(rollingUpCountSourceMap).build();
    return CuboidRecommender.getInstance().getRecommendCuboidList(cuboidStats, segment.getConfig(),
            ifForceRecommend);
}
 
Example 27
Source Project: kylin   Source File: HiveCmdStep.java    License: Apache License 2.0
protected void createFlatHiveTable(KylinConfig config) throws IOException {
    final HiveCmdBuilder hiveCmdBuilder = new HiveCmdBuilder();
    hiveCmdBuilder.overwriteHiveProps(config.getHiveConfigOverride());
    hiveCmdBuilder.addStatement(getCmd());
    final String cmd = hiveCmdBuilder.toString();

    stepLogger.log("cmd: ");
    stepLogger.log(cmd);

    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}
 
Example 28
Source Project: kylin-on-parquet-v2   Source File: SqoopCmdStep.java    License: Apache License 2.0
protected void sqoopFlatHiveTable(KylinConfig config) throws IOException {
    String cmd = getParam("cmd");
    cmd = String.format(Locale.ROOT, "%s/bin/sqoop import -Dorg.apache.sqoop.splitter.allow_text_splitter=true "
            + generateSqoopConfigArgString() + cmd, config.getSqoopHome());
    stepLogger.log(String.format(Locale.ROOT, "exe cmd:%s", cmd));
    Pair<Integer, String> response = config.getCliCommandExecutor().execute(cmd, stepLogger, null);
    getManager().addJobInfo(getId(), stepLogger.getInfo());
    if (response.getFirst() != 0) {
        throw new RuntimeException("Failed to create flat hive table, error code " + response.getFirst());
    }
}