Java Code Examples for org.apache.kylin.metadata.model.ColumnDesc#isComputedColumn()

The following examples show how to use org.apache.kylin.metadata.model.ColumnDesc#isComputedColumn(). You can vote up the examples you find useful or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: H2Database.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the H2 {@code CREATE TABLE ... AS SELECT * FROM CSVREAD(...)} statement
 * for the given table, loading data from the given CSV file.
 *
 * Computed columns are skipped entirely: they exist only in Kylin's model and
 * have no backing data in the source CSV.
 *
 * @param tableDesc   Kylin table metadata to mirror in H2
 * @param csvFilePath path of the CSV file that backs the table
 * @return the complete DDL string
 */
private String generateCreateH2TableSql(TableDesc tableDesc, String csvFilePath) {
    StringBuilder ddl = new StringBuilder();
    StringBuilder csvColumns = new StringBuilder();

    ddl.append("CREATE TABLE " + tableDesc.getIdentity() + "\n");
    ddl.append("(" + "\n");

    // BUGFIX: the original used `i > 0` to decide whether to emit a separator.
    // If the first column is a computed column (skipped via `continue`), the next
    // real column still had i > 0 and emitted a leading comma, producing invalid
    // DDL and a corrupt CSVREAD column list. Track emission explicitly instead.
    boolean firstEmitted = true;
    for (ColumnDesc col : tableDesc.getColumns()) {
        if (col.isComputedColumn()) {
            continue; // no physical data for computed columns
        }
        if (!firstEmitted) {
            ddl.append(",");
            csvColumns.append(",");
        }
        firstEmitted = false;
        ddl.append(col.getName() + " " + getH2DataType((col.getDatatype())) + "\n");
        csvColumns.append(col.getName());
    }
    ddl.append(")" + "\n");
    ddl.append("AS SELECT * FROM CSVREAD('" + csvFilePath + "', '" + csvColumns
            + "', 'charset=UTF-8 fieldSeparator=,');");

    return ddl.toString();
}
 
Example 2
Source File: TableSchemaUpdateChecker.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
/**
 * Checks that every column of {@code origTable} actually used by the cube still
 * has a compatible counterpart in the current hive schema.
 *
 * @param cube      cube to check; must use the table in its model
 * @param origTable kylin's table metadata
 * @param newTable  current hive schema of the table
 * @return names of used columns from origTable that are missing from, or
 *         incompatible with, newTable
 */
private List<String> checkAllColumnsInCube(CubeInstance cube, TableDesc origTable, TableDesc newTable) {
    // Collect the set of columns the cube actually references.
    Set<ColumnDesc> usedColumns = Sets.newHashSet();
    for (TblColRef colRef : cube.getAllColumns()) {
        usedColumns.add(colRef.getColumnDesc());
    }

    List<String> violateColumns = Lists.newArrayList();
    for (ColumnDesc origCol : origTable.getColumns()) {
        // Computed columns have no hive counterpart; unused columns don't matter.
        if (origCol.isComputedColumn() || !usedColumns.contains(origCol)) {
            continue;
        }
        ColumnDesc candidate = newTable.findColumnByName(origCol.getName());
        boolean stillValid = candidate != null && isColumnCompatible(origCol, candidate);
        if (!stillValid) {
            violateColumns.add(origCol.getName());
        }
    }
    return violateColumns;
}
 
Example 3
Source File: H2Database.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Builds the H2 {@code CREATE TABLE ... AS SELECT * FROM CSVREAD(...)} statement
 * for the given table, loading data from the given CSV file.
 *
 * Computed columns are skipped entirely: they exist only in Kylin's model and
 * have no backing data in the source CSV.
 *
 * @param tableDesc   Kylin table metadata to mirror in H2
 * @param csvFilePath path of the CSV file that backs the table
 * @return the complete DDL string
 */
private String generateCreateH2TableSql(TableDesc tableDesc, String csvFilePath) {
    StringBuilder ddl = new StringBuilder();
    StringBuilder csvColumns = new StringBuilder();

    ddl.append("CREATE TABLE " + tableDesc.getIdentity() + "\n");
    ddl.append("(" + "\n");

    // BUGFIX: the original used `i > 0` to decide whether to emit a separator.
    // If the first column is a computed column (skipped via `continue`), the next
    // real column still had i > 0 and emitted a leading comma, producing invalid
    // DDL and a corrupt CSVREAD column list. Track emission explicitly instead.
    boolean firstEmitted = true;
    for (ColumnDesc col : tableDesc.getColumns()) {
        if (col.isComputedColumn()) {
            continue; // no physical data for computed columns
        }
        if (!firstEmitted) {
            ddl.append(",");
            csvColumns.append(",");
        }
        firstEmitted = false;
        ddl.append(col.getName() + " " + getH2DataType((col.getDatatype())) + "\n");
        csvColumns.append(col.getName());
    }
    ddl.append(")" + "\n");
    ddl.append("AS SELECT * FROM CSVREAD('" + csvFilePath + "', '" + csvColumns
            + "', 'charset=UTF-8 fieldSeparator=,');");

    return ddl.toString();
}
 
Example 4
Source File: TableSchemaUpdateChecker.java    From kylin with Apache License 2.0 6 votes vote down vote up
/**
 * Checks that every column of {@code origTable} actually used by the cube still
 * has a compatible counterpart in the current hive schema.
 *
 * @param cube      cube to check; must use the table in its model
 * @param origTable kylin's table metadata
 * @param newTable  current hive schema of the table
 * @return names of used columns from origTable that are missing from, or
 *         incompatible with, newTable
 */
private List<String> checkAllColumnsInCube(CubeInstance cube, TableDesc origTable, TableDesc newTable) {
    // The set of columns the cube actually references.
    Set<ColumnDesc> referenced = Sets.newHashSet();
    for (TblColRef ref : cube.getAllColumns()) {
        referenced.add(ref.getColumnDesc());
    }

    List<String> violateColumns = Lists.newArrayList();
    for (ColumnDesc origCol : origTable.getColumns()) {
        // Only physical columns that the cube uses need validating; computed
        // columns have no hive counterpart by design.
        if (origCol.isComputedColumn() || !referenced.contains(origCol)) {
            continue;
        }
        ColumnDesc counterpart = newTable.findColumnByName(origCol.getName());
        if (counterpart == null || !isColumnCompatible(origCol, counterpart)) {
            violateColumns.add(origCol.getName());
        }
    }
    return violateColumns;
}
 
Example 5
Source File: TableSchemaUpdateChecker.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
/**
 * Validates that every fact-table column used by the model still exists with a
 * compatible definition in the new table schema.
 *
 * @param usedModel    model whose fact-table usage is checked
 * @param factTable    current fact-table metadata
 * @param newTableDesc candidate replacement schema
 * @return names of used columns that are missing or incompatible in newTableDesc
 */
private List<String> checkAllColumnsInFactTable(DataModelDesc usedModel, TableDesc factTable, TableDesc newTableDesc) {
    List<String> violateColumns = Lists.newArrayList();

    for (ColumnDesc used : findUsedColumnsInFactTable(usedModel, factTable)) {
        // Computed columns are not expected in the physical schema; skip them.
        if (used.isComputedColumn()) {
            continue;
        }
        ColumnDesc replacement = newTableDesc.findColumnByName(used.getName());
        boolean stillValid = replacement != null && isColumnCompatible(used, replacement);
        if (!stillValid) {
            violateColumns.add(used.getName());
        }
    }
    return violateColumns;
}
 
Example 6
Source File: TableSchemaUpdateChecker.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Validates that every fact-table column used by the model still exists in the
 * new table schema.
 *
 * NOTE(review): this variant only checks column existence, not type
 * compatibility via isColumnCompatible — confirm whether that is intentional.
 *
 * @param usedModel    model whose fact-table usage is checked
 * @param factTable    current fact-table metadata
 * @param newTableDesc candidate replacement schema
 * @return names of used columns that are absent from newTableDesc
 */
private List<String> checkAllColumnsInFactTable(DataModelDesc usedModel, TableDesc factTable,
        TableDesc newTableDesc) {
    List<String> violateColumns = Lists.newArrayList();

    for (ColumnDesc used : findUsedColumnsInFactTable(usedModel, factTable)) {
        // Computed columns have no physical counterpart; skip them.
        if (used.isComputedColumn()) {
            continue;
        }
        if (newTableDesc.findColumnByName(used.getName()) == null) {
            violateColumns.add(used.getName());
        }
    }
    return violateColumns;
}