Java Code Examples for org.apache.kylin.metadata.model.TableDesc#getSourceType()

The following examples show how to use org.apache.kylin.metadata.model.TableDesc#getSourceType(). You can go to the original project or source file by following the links above each example.
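
getSourceType() returns an int identifying where a table's data comes from, and callers compare it against the constants declared on ISourceAware, as every example below does. A minimal sketch of that dispatch pattern, assuming ISourceAware sits alongside TableDesc in org.apache.kylin.metadata.model and using only the two constants that actually appear in the examples:

import org.apache.kylin.metadata.model.ISourceAware;
import org.apache.kylin.metadata.model.TableDesc;

// Hypothetical helper illustrating the usual dispatch on getSourceType().
public static String describeSource(TableDesc tableDesc) {
    int sourceType = tableDesc.getSourceType();
    if (sourceType == ISourceAware.ID_STREAMING) {
        return "streaming source";
    } else if (sourceType == ISourceAware.ID_KAFKA_HIVE) {
        return "Kafka stream backed by a hive table";
    } else {
        return "batch source";
    }
}
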
Example 1
Source File: ModelService.java    From kylin-on-parquet-v2 with Apache License 2.0
public void validateModel(String project, DataModelDesc desc) throws IllegalArgumentException {
    String factTableName = desc.getRootFactTableName();
    TableDesc tableDesc = getTableManager().getTableDesc(factTableName, project);
    // Streaming tables require a partition date column, so reject models that omit one.
    if ((tableDesc.getSourceType() == ISourceAware.ID_STREAMING || tableDesc.isStreamingTable())
            && (desc.getPartitionDesc() == null || desc.getPartitionDesc().getPartitionDateColumn() == null)) {
        throw new IllegalArgumentException("Must define a partition column.");
    }
}
 
Example 2
Source File: ModelService.java    From kylin with Apache License 2.0
public void validateModel(String project, DataModelDesc desc) throws IllegalArgumentException {
    String factTableName = desc.getRootFactTableName();
    TableDesc tableDesc = getTableManager().getTableDesc(factTableName, project);
    // Streaming tables require a partition date column, so reject models that omit one.
    if ((tableDesc.getSourceType() == ISourceAware.ID_STREAMING || tableDesc.isStreamingTable())
            && (desc.getPartitionDesc() == null || desc.getPartitionDesc().getPartitionDateColumn() == null)) {
        throw new IllegalArgumentException("Must define a partition column.");
    }
}
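
Examples 1 and 2 are identical; they simply come from the kylin-on-parquet-v2 and kylin repositories respectively. A hypothetical caller would surface the check like this (saveModelIfValid, the ModelService wiring, and the model descriptor are assumptions, not part of the examples):

// Hypothetical usage sketch: only validateModel() comes from the examples above.
void saveModelIfValid(ModelService modelService, String project, DataModelDesc modelDesc) {
    try {
        modelService.validateModel(project, modelDesc);
    } catch (IllegalArgumentException e) {
        // Thrown when the root fact table is a streaming table
        // but the model defines no partition date column.
        throw new BadRequestException("Streaming model needs a partition date column: " + e.getMessage());
    }
}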
 
Example 3
Source File: StreamingV2Controller.java    From kylin-on-parquet-v2 with Apache License 2.0
private void validateInput(TableDesc tableDesc, StreamingSourceConfig streamingSourceConfig) {
    if (StringUtils.isEmpty(tableDesc.getIdentity()) || StringUtils.isEmpty(streamingSourceConfig.getName())) {
        logger.error("streamingSourceConfig name should not be empty.");
        throw new BadRequestException("streamingSourceConfig name should not be empty.");
    }

    // validate that the input table schema is compatible with the underlying hive table schema
    if (tableDesc.getSourceType() == ISourceAware.ID_KAFKA_HIVE) {
        List<FieldSchema> fields;
        String db = tableDesc.getDatabase();
        try {
            HiveMetaStoreClient metaStoreClient = new HiveMetaStoreClient(new HiveConf());
            fields = metaStoreClient.getFields(db, tableDesc.getName());
            logger.info("Checking the {} in {}", tableDesc.getName(), db);
        } catch (NoSuchObjectException noObjectException) {
            logger.info("table not exist in hive meta store for table:" + tableDesc.getIdentity(),
                    noObjectException);
            throw new BadRequestException(
                    "table doesn't exist in hive meta store for table:" + tableDesc.getIdentity(),
                    ResponseCode.CODE_UNDEFINED, noObjectException);
        } catch (Exception e) {
            logger.error("error when get metadata from hive meta store for table:" + tableDesc.getIdentity(), e);
            throw new BadRequestException("error when connect hive meta store", ResponseCode.CODE_UNDEFINED, e);
        }
        // check the data type compatibility for each column
        Map<String, FieldSchema> fieldSchemaMap = Maps.newHashMap();
        for (FieldSchema field : fields) {
            fieldSchemaMap.put(field.getName().toUpperCase(Locale.ROOT), field);
        }
        List<String> incompatibleMsgs = Lists.newArrayList();
        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
            FieldSchema fieldSchema = fieldSchemaMap.get(columnDesc.getName().toUpperCase(Locale.ROOT));
            if (fieldSchema == null) {
                // Time-derived (partition) columns cannot be fetched via the Hive metadata API,
                // so their absence is expected; any other missing column is flagged as incompatible.
                if (TimeDerivedColumnType.isTimeDerivedColumn(columnDesc.getName())) {
                    logger.info("Column does not exist in hive table: {}.", columnDesc.getName());
                } else {
                    incompatibleMsgs.add("Column does not exist in hive table: " + columnDesc.getName());
                }
                continue;
            }
            if (!checkHiveTableFieldCompatible(fieldSchema, columnDesc)) {
                String msg = String.format(Locale.ROOT,
                        "column:%s defined in hive type:%s is incompatible with the column definition:%s",
                        columnDesc.getName(), fieldSchema.getType(), columnDesc.getDatatype());
                incompatibleMsgs.add(msg);
            }
        }
        if (!incompatibleMsgs.isEmpty()) {
            logger.info("incompatible for hive and input table schema:{}", incompatibleMsgs);
            throw new BadRequestException(
                    "incompatible for hive schema and input table schema:" + incompatibleMsgs);
        }
    }
}
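
The helper checkHiveTableFieldCompatible(...) is called above but not shown. Purely as an illustration of the kind of comparison such a method performs, here is a simplified, hypothetical sketch; it is an assumption about the method's shape, not Kylin's actual logic:

// Hypothetical sketch of a hive/Kylin type-compatibility check; not the real implementation.
private boolean checkHiveTableFieldCompatible(FieldSchema fieldSchema, ColumnDesc columnDesc) {
    String hiveType = fieldSchema.getType().toLowerCase(Locale.ROOT);
    String kylinType = columnDesc.getDatatype().toLowerCase(Locale.ROOT);
    if (hiveType.equals(kylinType)) {
        return true; // an exact match is always compatible
    }
    // Tolerate a few common alias pairs (illustrative, not exhaustive).
    if (hiveType.equals("string") && kylinType.startsWith("varchar")) {
        return true;
    }
    return hiveType.equals("int") && kylinType.equals("integer");
}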
 
Example 4
Source File: StreamingV2Controller.java    From kylin with Apache License 2.0
private void validateInput(TableDesc tableDesc, StreamingSourceConfig streamingSourceConfig) {
    if (StringUtils.isEmpty(tableDesc.getIdentity()) || StringUtils.isEmpty(streamingSourceConfig.getName())) {
        logger.error("streamingSourceConfig name should not be empty.");
        throw new BadRequestException("streamingSourceConfig name should not be empty.");
    }

    // validate that the input table schema is compatible with the underlying hive table schema
    if (tableDesc.getSourceType() == ISourceAware.ID_KAFKA_HIVE) {
        List<FieldSchema> fields;
        String db = tableDesc.getDatabase();
        try {
            HiveMetaStoreClient metaStoreClient = new HiveMetaStoreClient(new HiveConf());
            fields = metaStoreClient.getFields(db, tableDesc.getName());
            logger.info("Checking the {} in {}", tableDesc.getName(), db);
        } catch (NoSuchObjectException noObjectException) {
            logger.info("table not exist in hive meta store for table:" + tableDesc.getIdentity(),
                    noObjectException);
            throw new BadRequestException(
                    "table doesn't exist in hive meta store for table:" + tableDesc.getIdentity(),
                    ResponseCode.CODE_UNDEFINED, noObjectException);
        } catch (Exception e) {
            logger.error("error when get metadata from hive meta store for table:" + tableDesc.getIdentity(), e);
            throw new BadRequestException("error when connect hive meta store", ResponseCode.CODE_UNDEFINED, e);
        }
        // check the data type compatibility for each column
        Map<String, FieldSchema> fieldSchemaMap = Maps.newHashMap();
        for (FieldSchema field : fields) {
            fieldSchemaMap.put(field.getName().toUpperCase(Locale.ROOT), field);
        }
        List<String> incompatibleMsgs = Lists.newArrayList();
        for (ColumnDesc columnDesc : tableDesc.getColumns()) {
            FieldSchema fieldSchema = fieldSchemaMap.get(columnDesc.getName().toUpperCase(Locale.ROOT));
            if (fieldSchema == null) {
                // Time-derived (partition) columns cannot be fetched via the Hive metadata API,
                // so their absence is expected; any other missing column is flagged as incompatible.
                if (TimeDerivedColumnType.isTimeDerivedColumn(columnDesc.getName())) {
                    logger.info("Column does not exist in hive table: {}.", columnDesc.getName());
                } else {
                    incompatibleMsgs.add("Column does not exist in hive table: " + columnDesc.getName());
                }
                continue;
            }
            if (!checkHiveTableFieldCompatible(fieldSchema, columnDesc)) {
                String msg = String.format(Locale.ROOT,
                        "column:%s defined in hive type:%s is incompatible with the column definition:%s",
                        columnDesc.getName(), fieldSchema.getType(), columnDesc.getDatatype());
                incompatibleMsgs.add(msg);
            }
        }
        if (!incompatibleMsgs.isEmpty()) {
            logger.info("incompatible for hive and input table schema:{}", incompatibleMsgs);
            throw new BadRequestException(
                    "incompatible for hive schema and input table schema:" + incompatibleMsgs);
        }
    }
}
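
One caveat applies to both controller examples: the HiveMetaStoreClient is never closed, so every validation call can leak a metastore connection. Assuming nothing else holds on to the client, a safer variant of the lookup would release it in a finally block:

HiveMetaStoreClient metaStoreClient = null;
try {
    metaStoreClient = new HiveMetaStoreClient(new HiveConf());
    fields = metaStoreClient.getFields(db, tableDesc.getName());
} finally {
    if (metaStoreClient != null) {
        metaStoreClient.close(); // release the metastore connection
    }
}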