Java Code Examples for org.apache.kylin.metadata.model.TableDesc#getDatabase()

The following examples show how to use org.apache.kylin.metadata.model.TableDesc#getDatabase(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: HiveTable.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
/**
 * Creates a HiveTable for the given Kylin table descriptor, eagerly
 * fetching the Hive-side metadata for the descriptor's database/table pair.
 *
 * @param tableDesc Kylin metadata describing the Hive table
 * @throws RuntimeException if the Hive table metadata cannot be retrieved
 */
public HiveTable(TableDesc tableDesc) {
    database = tableDesc.getDatabase();
    hiveTable = tableDesc.getName();
    try {
        // Resolve the concrete Hive metadata up front so later accessors are cheap.
        hiveTableMeta = getHiveClient().getHiveTableMeta(database, hiveTable);
    } catch (Exception e) {
        throw new RuntimeException("cannot get HiveTableMeta", e);
    }
}
 
Example 2
Source File: ValidateUtil.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
/**
 * Returns the column names of the given table within a project.
 * Table matching is case-insensitive against "database.table"; the
 * returned set is also case-insensitive. Empty if the table is not found.
 *
 * @param project the project to search
 * @param table   fully qualified table name ("DB.TABLE"), case-insensitive
 * @return case-insensitive set of column names (empty when no match)
 * @throws IOException if the table descriptors cannot be loaded
 */
private Set<String> getAllColumns(String project, String table) throws IOException {
    Set<String> columns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    for (TableDesc desc : tableService.getTableDescByProject(project, true)) {
        String qualified = desc.getDatabase() + "." + desc.getName();
        if (!qualified.equalsIgnoreCase(table)) {
            continue;
        }
        for (ColumnDesc column : desc.getColumns()) {
            columns.add(column.getName());
        }
        break; // only one table can match the fully qualified name
    }
    return columns;
}
 
Example 3
Source File: HiveTable.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Builds a HiveTable from a Kylin table descriptor and immediately loads
 * the corresponding Hive table metadata.
 *
 * @param tableDesc Kylin metadata describing the Hive table
 * @throws RuntimeException if the Hive table metadata cannot be retrieved
 */
public HiveTable(TableDesc tableDesc) {
    database = tableDesc.getDatabase();
    hiveTable = tableDesc.getName();
    try {
        // Fetch Hive metadata eagerly; failure here means the table is unusable.
        hiveTableMeta = getHiveClient().getHiveTableMeta(database, hiveTable);
    } catch (Exception e) {
        throw new RuntimeException("cannot get HiveTableMeta", e);
    }
}
 
Example 4
Source File: ValidateUtil.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Looks up the column names of one table in a project.
 * The "database.table" comparison is case-insensitive, as is the result set.
 * Returns an empty set when the table is not present in the project.
 *
 * @param project the project whose tables are searched
 * @param table   fully qualified table name ("DB.TABLE"), case-insensitive
 * @return case-insensitive set of column names (possibly empty)
 * @throws IOException if the table descriptors cannot be loaded
 */
private Set<String> getAllColumns(String project, String table) throws IOException {
    Set<String> columns = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
    for (TableDesc desc : tableService.getTableDescByProject(project, true)) {
        String qualified = desc.getDatabase() + "." + desc.getName();
        if (!qualified.equalsIgnoreCase(table)) {
            continue;
        }
        for (ColumnDesc column : desc.getColumns()) {
            columns.add(column.getName());
        }
        break; // at most one descriptor matches the qualified name
    }
    return columns;
}
 
Example 5
Source File: HiveInputBase.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Resolves the upper-cased "DATABASE.TABLE" identifier HCatalog should read.
 * Views are redirected to their materialized intermediate table in the
 * configured intermediate-table database; plain tables use their own
 * database and name.
 *
 * @param table the source table descriptor
 * @param uuid  job uuid used to derive the materialized view name
 * @return upper-cased, dot-separated database/table identifier
 */
protected static String getTableNameForHCat(TableDesc table, String uuid) {
    String tableName;
    String database;
    if (table.isView()) {
        // A view is read through its materialized intermediate table.
        tableName = table.getMaterializedName(uuid);
        database = KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable();
    } else {
        tableName = table.getName();
        database = table.getDatabase();
    }
    return String.format(Locale.ROOT, "%s.%s", database, tableName).toUpperCase(Locale.ROOT);
}
 
Example 6
Source File: JdbcTable.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a JdbcTable bound to a specific JDBC data source.
 *
 * @param dataSource connector used to reach the underlying database
 * @param tableDesc  Kylin descriptor supplying database and table names
 */
public JdbcTable(JdbcConnector dataSource, TableDesc tableDesc) {
    this.dataSource = dataSource;
    database = tableDesc.getDatabase();
    tableName = tableDesc.getName();
}
 
Example 7
Source File: JdbcTable.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a JdbcTable from a table descriptor without binding a data source.
 *
 * @param tableDesc Kylin descriptor supplying database and table names
 */
public JdbcTable(TableDesc tableDesc) {
    database = tableDesc.getDatabase();
    tableName = tableDesc.getName();
}
 
Example 8
Source File: StreamingV2Controller.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Validates a streaming source registration.
 * <p>
 * Rejects the request when the table identity or streaming source name is
 * empty. For Kafka-on-Hive sources, additionally verifies that every column
 * declared in the input table schema exists in the underlying Hive table
 * with a compatible type (time-derived columns are exempt, since partition
 * columns are not returned by the Hive metadata API).
 * <p>
 * Fix: the {@code HiveMetaStoreClient} was previously never closed, leaking
 * a metastore connection on every validation; it is now released in a
 * {@code finally} block.
 *
 * @param tableDesc             the table definition submitted by the caller
 * @param streamingSourceConfig the streaming source configuration to validate
 * @throws BadRequestException if a name is missing, the Hive table cannot be
 *                             read, or any column is incompatible
 */
private void validateInput(TableDesc tableDesc, StreamingSourceConfig streamingSourceConfig) {
    if (StringUtils.isEmpty(tableDesc.getIdentity()) || StringUtils.isEmpty(streamingSourceConfig.getName())) {
        logger.error("streamingSourceConfig name should not be empty.");
        throw new BadRequestException("streamingSourceConfig name should not be empty.");
    }

    // validate the compatibility for input table schema and the underline hive table schema
    if (tableDesc.getSourceType() == ISourceAware.ID_KAFKA_HIVE) {
        List<FieldSchema> fields;
        String db = tableDesc.getDatabase();
        HiveMetaStoreClient metaStoreClient = null;
        try {
            // Ask the Hive metastore for the authoritative column list.
            metaStoreClient = new HiveMetaStoreClient(new HiveConf());
            fields = metaStoreClient.getFields(db, tableDesc.getName());
            logger.info("Checking the {} in {}", tableDesc.getName(), db);
        } catch (NoSuchObjectException noObjectException) {
            logger.info("table not exist in hive meta store for table:" + tableDesc.getIdentity(),
                    noObjectException);
            throw new BadRequestException(
                    "table doesn't exist in hive meta store for table:" + tableDesc.getIdentity(),
                    ResponseCode.CODE_UNDEFINED, noObjectException);
        } catch (Exception e) {
            logger.error("error when get metadata from hive meta store for table:" + tableDesc.getIdentity(), e);
            throw new BadRequestException("error when connect hive meta store", ResponseCode.CODE_UNDEFINED, e);
        } finally {
            // Release the metastore connection; previously this client leaked.
            if (metaStoreClient != null) {
                metaStoreClient.close();
            }
        }
        // check the data type compatibility for each column
        Map<String, FieldSchema> fieldSchemaMap = Maps.newHashMap();
        for (FieldSchema field : fields) {
            fieldSchemaMap.put(field.getName().toUpperCase(Locale.ROOT), field);
        }
        List<String> incompatibleMsgs = Lists.newArrayList();
        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
            FieldSchema fieldSchema = fieldSchemaMap.get(columnDesc.getName().toUpperCase(Locale.ROOT));
            if (fieldSchema == null) {
                // Partition column cannot be fetched via Hive Metadata API.
                if (!TimeDerivedColumnType.isTimeDerivedColumn(columnDesc.getName())) {
                    incompatibleMsgs.add("Column not exist in hive table:" + columnDesc.getName());
                    continue;
                } else {
                    logger.info("Column not exist in hive table: {}.", columnDesc.getName());
                    continue;
                }
            }
            if (!checkHiveTableFieldCompatible(fieldSchema, columnDesc)) {
                String msg = String.format(Locale.ROOT,
                        "column:%s defined in hive type:%s is incompatible with the column definition:%s",
                        columnDesc.getName(), fieldSchema.getType(), columnDesc.getDatatype());
                incompatibleMsgs.add(msg);
            }
        }
        if (!incompatibleMsgs.isEmpty()) {
            logger.info("incompatible for hive and input table schema:{}", incompatibleMsgs);
            throw new BadRequestException(
                    "incompatible for hive schema and input table schema:" + incompatibleMsgs);
        }
    }
}
 
Example 9
Source File: HiveInputBase.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Computes the upper-cased "DATABASE.TABLE" name HCatalog should consume.
 * A view resolves to its materialized intermediate table in the configured
 * intermediate-table database; an ordinary table keeps its own database
 * and name.
 *
 * @param table the source table descriptor
 * @param uuid  job uuid used to derive the materialized view name
 * @return upper-cased, dot-separated database/table identifier
 */
protected static String getTableNameForHCat(TableDesc table, String uuid) {
    String tableName;
    String database;
    if (table.isView()) {
        // Views are read via their materialized intermediate table.
        tableName = table.getMaterializedName(uuid);
        database = KylinConfig.getInstanceFromEnv().getHiveDatabaseForIntermediateTable();
    } else {
        tableName = table.getName();
        database = table.getDatabase();
    }
    return String.format(Locale.ROOT, "%s.%s", database, tableName).toUpperCase(Locale.ROOT);
}
 
Example 10
Source File: JdbcTable.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a JdbcTable bound to the given JDBC connector.
 *
 * @param dataSource connector used to reach the underlying database
 * @param tableDesc  Kylin descriptor supplying database and table names
 */
public JdbcTable(JdbcConnector dataSource, TableDesc tableDesc) {
    this.dataSource = dataSource;
    database = tableDesc.getDatabase();
    tableName = tableDesc.getName();
}
 
Example 11
Source File: JdbcTable.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a JdbcTable from a descriptor only, with no data source attached.
 *
 * @param tableDesc Kylin descriptor supplying database and table names
 */
public JdbcTable(TableDesc tableDesc) {
    database = tableDesc.getDatabase();
    tableName = tableDesc.getName();
}
 
Example 12
Source File: StreamingV2Controller.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Validates a streaming source registration.
 * <p>
 * Rejects the request when the table identity or streaming source name is
 * empty. For Kafka-on-Hive sources, additionally verifies that every column
 * declared in the input table schema exists in the underlying Hive table
 * with a compatible type (time-derived columns are exempt, since partition
 * columns are not returned by the Hive metadata API).
 * <p>
 * Fix: the {@code HiveMetaStoreClient} was previously never closed, leaking
 * a metastore connection on every validation; it is now released in a
 * {@code finally} block.
 *
 * @param tableDesc             the table definition submitted by the caller
 * @param streamingSourceConfig the streaming source configuration to validate
 * @throws BadRequestException if a name is missing, the Hive table cannot be
 *                             read, or any column is incompatible
 */
private void validateInput(TableDesc tableDesc, StreamingSourceConfig streamingSourceConfig) {
    if (StringUtils.isEmpty(tableDesc.getIdentity()) || StringUtils.isEmpty(streamingSourceConfig.getName())) {
        logger.error("streamingSourceConfig name should not be empty.");
        throw new BadRequestException("streamingSourceConfig name should not be empty.");
    }

    // validate the compatibility for input table schema and the underline hive table schema
    if (tableDesc.getSourceType() == ISourceAware.ID_KAFKA_HIVE) {
        List<FieldSchema> fields;
        String db = tableDesc.getDatabase();
        HiveMetaStoreClient metaStoreClient = null;
        try {
            // Ask the Hive metastore for the authoritative column list.
            metaStoreClient = new HiveMetaStoreClient(new HiveConf());
            fields = metaStoreClient.getFields(db, tableDesc.getName());
            logger.info("Checking the {} in {}", tableDesc.getName(), db);
        } catch (NoSuchObjectException noObjectException) {
            logger.info("table not exist in hive meta store for table:" + tableDesc.getIdentity(),
                    noObjectException);
            throw new BadRequestException(
                    "table doesn't exist in hive meta store for table:" + tableDesc.getIdentity(),
                    ResponseCode.CODE_UNDEFINED, noObjectException);
        } catch (Exception e) {
            logger.error("error when get metadata from hive meta store for table:" + tableDesc.getIdentity(), e);
            throw new BadRequestException("error when connect hive meta store", ResponseCode.CODE_UNDEFINED, e);
        } finally {
            // Release the metastore connection; previously this client leaked.
            if (metaStoreClient != null) {
                metaStoreClient.close();
            }
        }
        // check the data type compatibility for each column
        Map<String, FieldSchema> fieldSchemaMap = Maps.newHashMap();
        for (FieldSchema field : fields) {
            fieldSchemaMap.put(field.getName().toUpperCase(Locale.ROOT), field);
        }
        List<String> incompatibleMsgs = Lists.newArrayList();
        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
            FieldSchema fieldSchema = fieldSchemaMap.get(columnDesc.getName().toUpperCase(Locale.ROOT));
            if (fieldSchema == null) {
                // Partition column cannot be fetched via Hive Metadata API.
                if (!TimeDerivedColumnType.isTimeDerivedColumn(columnDesc.getName())) {
                    incompatibleMsgs.add("Column not exist in hive table:" + columnDesc.getName());
                    continue;
                } else {
                    logger.info("Column not exist in hive table: {}.", columnDesc.getName());
                    continue;
                }
            }
            if (!checkHiveTableFieldCompatible(fieldSchema, columnDesc)) {
                String msg = String.format(Locale.ROOT,
                        "column:%s defined in hive type:%s is incompatible with the column definition:%s",
                        columnDesc.getName(), fieldSchema.getType(), columnDesc.getDatatype());
                incompatibleMsgs.add(msg);
            }
        }
        if (!incompatibleMsgs.isEmpty()) {
            logger.info("incompatible for hive and input table schema:{}", incompatibleMsgs);
            throw new BadRequestException(
                    "incompatible for hive schema and input table schema:" + incompatibleMsgs);
        }
    }
}
 
Example 13
Source File: HiveTable.java    From Kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Creates a HiveTable by resolving the named table through the metadata
 * manager, caching its database, name, and column count.
 *
 * @param metaMgr metadata manager used to look up the table descriptor
 * @param table   identity of the table to resolve
 */
public HiveTable(MetadataManager metaMgr, String table) {
    TableDesc desc = metaMgr.getTableDesc(table);
    database = desc.getDatabase();
    hiveTable = desc.getName();
    nColumns = desc.getColumnCount();
}