Java Code Examples for org.apache.kylin.common.KylinConfig#getJdbcSourceUser()

The following examples show how to use org.apache.kylin.common.KylinConfig#getJdbcSourceUser(). You can vote up the examples you like or vote down the ones you don't like, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage on the sidebar.
Example 1
Source File: JdbcHiveInputBase.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
public JdbcBaseBatchCubingInputSide(IJoinedFlatTableDesc flatDesc, boolean skipCacheMeta) {
    super(flatDesc);
    if (skipCacheMeta) {
        return;
    }
    // Wire up the JDBC source connection from the environment-level Kylin config.
    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    String url = kylinConfig.getJdbcSourceConnectionUrl();
    String driver = kylinConfig.getJdbcSourceDriver();
    String user = kylinConfig.getJdbcSourceUser();
    String password = kylinConfig.getJdbcSourcePass();
    dbconf = new DBConnConf(driver, url, user, password);
    dialect = SourceDialect.getDialect(kylinConfig.getJdbcSourceDialect());
    jdbcMetadataDialect = JdbcMetadataFactory.getJdbcMetadata(dialect, dbconf);
    // Populate the metadata cache up front so later identifier lookups stay local.
    calCachedJdbcMeta(metaMap, dbconf, jdbcMetadataDialect);
    if (logger.isTraceEnabled()) {
        StringBuilder dump = new StringBuilder();
        metaMap.forEach((key, value) -> dump.append("CachedMetadata: ").append(key).append(" => ").append(value)
                .append(System.lineSeparator()));
        logger.trace(dump.toString());
    }
}
 
Example 2
Source File: SourceConnectorFactory.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
public static JdbcConnector getJdbcConnector(KylinConfig config) {
    // Assemble the adaptor configuration straight from the kylin.source.jdbc.* settings.
    AdaptorConfig adaptorConfig = new AdaptorConfig(config.getJdbcSourceConnectionUrl(),
            config.getJdbcSourceDriver(), config.getJdbcSourceUser(), config.getJdbcSourcePass());
    adaptorConfig.poolMaxIdle = config.getPoolMaxIdle(null);
    adaptorConfig.poolMinIdle = config.getPoolMinIdle(null);
    adaptorConfig.poolMaxTotal = config.getPoolMaxTotal(null);
    adaptorConfig.datasourceId = config.getJdbcSourceDialect();

    String adaptorClassName = config.getJdbcSourceAdaptor();
    if (adaptorClassName == null) {
        // No adaptor configured explicitly: derive one from the dialect id.
        adaptorClassName = decideAdaptorClassName(adaptorConfig.datasourceId);
    }

    try {
        return new JdbcConnector(AdaptorFactory.createJdbcAdaptor(adaptorClassName, adaptorConfig));
    } catch (Exception e) {
        throw new RuntimeException("Failed to get JdbcConnector from env.", e);
    }
}
 
Example 3
Source File: JdbcHiveInputBase.java    From kylin with Apache License 2.0 6 votes vote down vote up
public JdbcBaseBatchCubingInputSide(IJoinedFlatTableDesc flatDesc, boolean skipCacheMeta) {
    super(flatDesc);
    if (skipCacheMeta) {
        return;
    }
    // Read the JDBC source connection settings from the environment config.
    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    String url = kylinConfig.getJdbcSourceConnectionUrl();
    String driver = kylinConfig.getJdbcSourceDriver();
    String user = kylinConfig.getJdbcSourceUser();
    String password = kylinConfig.getJdbcSourcePass();
    dbconf = new DBConnConf(driver, url, user, password);
    dialect = SourceDialect.getDialect(kylinConfig.getJdbcSourceDialect());
    jdbcMetadataDialect = JdbcMetadataFactory.getJdbcMetadata(dialect, dbconf);
    // Eagerly cache source metadata so subsequent identifier quoting needs no round trips.
    calCachedJdbcMeta(metaMap, dbconf, jdbcMetadataDialect);
    if (logger.isTraceEnabled()) {
        StringBuilder dump = new StringBuilder();
        metaMap.forEach((key, value) -> dump.append("CachedMetadata: ").append(key).append(" => ").append(value)
                .append(System.lineSeparator()));
        logger.trace(dump.toString());
    }
}
 
Example 4
Source File: SourceConnectorFactory.java    From kylin with Apache License 2.0 6 votes vote down vote up
public static JdbcConnector getJdbcConnector(KylinConfig config) {
    // Prefer the dedicated kylin.source.jdbc.* settings; fall back to the generic
    // JDBC settings when a source-specific value is absent.
    String url = config.getJdbcSourceConnectionUrl();
    if (url == null) {
        url = config.getJdbcUrl(null);
    }
    String driver = config.getJdbcSourceDriver();
    if (driver == null) {
        driver = config.getJdbcDriverClass(null);
    }
    String user = config.getJdbcSourceUser();
    if (user == null) {
        user = config.getJdbcUsername(null);
    }
    String pass = config.getJdbcSourcePass();
    if (pass == null) {
        pass = config.getJdbcPassword(null);
    }

    AdaptorConfig adaptorConfig = new AdaptorConfig(url, driver, user, pass);
    adaptorConfig.poolMaxIdle = config.getPoolMaxIdle(null);
    adaptorConfig.poolMinIdle = config.getPoolMinIdle(null);
    adaptorConfig.poolMaxTotal = config.getPoolMaxTotal(null);
    adaptorConfig.datasourceId = config.getJdbcSourceDialect();

    String adaptorClassName = config.getJdbcSourceAdaptor();
    if (adaptorClassName == null) {
        // No adaptor configured explicitly: derive one from the dialect id.
        adaptorClassName = decideAdaptorClassName(adaptorConfig.datasourceId);
    }

    try {
        return new JdbcConnector(AdaptorFactory.createJdbcAdaptor(adaptorClassName, adaptorConfig));
    } catch (Exception e) {
        throw new RuntimeException("Failed to get JdbcConnector from env.", e);
    }
}
 
Example 5
Source File: JdbcExplorer.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
public JdbcExplorer(KylinConfig config) {
    // Translate the kylin.source.jdbc.* settings into a connection descriptor
    // and pick the metadata implementation matching the source dialect.
    this.dbconf = new DBConnConf(config.getJdbcSourceDriver(), config.getJdbcSourceConnectionUrl(),
            config.getJdbcSourceUser(), config.getJdbcSourcePass());
    this.dialect = SourceDialect.getDialect(config.getJdbcSourceDialect());
    this.jdbcMetadataDialect = JdbcMetadataFactory.getJdbcMetadata(this.dialect, this.dbconf);
}
 
Example 6
Source File: JdbcTableReader.java    From kylin-on-parquet-v2 with Apache License 2.0 5 votes vote down vote up
/**
 * Opens a streaming read over an entire JDBC table ({@code select * from db.table}).
 *
 * @param dbName    source database/schema name
 * @param tableName source table name
 * @throws IOException if the bootstrap query cannot be executed
 */
public JdbcTableReader(String dbName, String tableName) throws IOException {
    this.dbName = dbName;
    this.tableName = tableName;

    KylinConfig config = KylinConfig.getInstanceFromEnv();
    dbconf = new DBConnConf(config.getJdbcSourceDriver(), config.getJdbcSourceConnectionUrl(),
            config.getJdbcSourceUser(), config.getJdbcSourcePass());
    jdbcCon = SqlUtil.getConnection(dbconf);
    IJdbcMetadata jdbcMeta = JdbcMetadataFactory
            .getJdbcMetadata(SourceDialect.getDialect(config.getJdbcSourceDialect()), dbconf);

    // Quote the schema/table identifiers according to the source dialect before
    // building the query, using a freshly cached metadata snapshot.
    Map<String, String> metadataCache = new TreeMap<>();
    JdbcHiveInputBase.JdbcBaseBatchCubingInputSide.calCachedJdbcMeta(metadataCache, dbconf, jdbcMeta);
    String quotedSchema = JdbcHiveInputBase.getSchemaQuoted(metadataCache, dbName, jdbcMeta, true);
    String quotedTable = JdbcHiveInputBase.getTableIdentityQuoted(dbName, tableName, metadataCache, jdbcMeta, true);

    String sql = String.format(Locale.ROOT, "select * from %s.%s", quotedSchema, quotedTable);
    try {
        statement = jdbcCon.createStatement();
        rs = statement.executeQuery(sql);
        colCount = rs.getMetaData().getColumnCount();
    } catch (SQLException e) {
        throw new IOException(String.format(Locale.ROOT, "error while exec %s", sql), e);
    }
}
 
Example 7
Source File: JdbcExplorer.java    From kylin with Apache License 2.0 5 votes vote down vote up
public JdbcExplorer(KylinConfig config) {
    // Build the connection descriptor from the kylin.source.jdbc.* settings,
    // then resolve the dialect-specific metadata implementation.
    this.dbconf = new DBConnConf(config.getJdbcSourceDriver(), config.getJdbcSourceConnectionUrl(),
            config.getJdbcSourceUser(), config.getJdbcSourcePass());
    this.dialect = SourceDialect.getDialect(config.getJdbcSourceDialect());
    this.jdbcMetadataDialect = JdbcMetadataFactory.getJdbcMetadata(this.dialect, this.dbconf);
}
 
Example 8
Source File: JdbcTableReader.java    From kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Opens a full-table scan over a JDBC source table ({@code select * from db.table}).
 *
 * @param dbName    source database/schema name
 * @param tableName source table name
 * @throws IOException if the bootstrap query fails
 */
public JdbcTableReader(String dbName, String tableName) throws IOException {
    this.dbName = dbName;
    this.tableName = tableName;

    KylinConfig config = KylinConfig.getInstanceFromEnv();
    dbconf = new DBConnConf(config.getJdbcSourceDriver(), config.getJdbcSourceConnectionUrl(),
            config.getJdbcSourceUser(), config.getJdbcSourcePass());
    jdbcCon = SqlUtil.getConnection(dbconf);
    IJdbcMetadata jdbcMeta = JdbcMetadataFactory
            .getJdbcMetadata(SourceDialect.getDialect(config.getJdbcSourceDialect()), dbconf);

    // Resolve dialect-correct quoting for the schema and table identifiers
    // from a freshly cached metadata snapshot.
    Map<String, String> metadataCache = new TreeMap<>();
    JdbcHiveInputBase.JdbcBaseBatchCubingInputSide.calCachedJdbcMeta(metadataCache, dbconf, jdbcMeta);
    String quotedSchema = JdbcHiveInputBase.getSchemaQuoted(metadataCache, dbName, jdbcMeta, true);
    String quotedTable = JdbcHiveInputBase.getTableIdentityQuoted(dbName, tableName, metadataCache, jdbcMeta, true);

    String sql = String.format(Locale.ROOT, "select * from %s.%s", quotedSchema, quotedTable);
    try {
        statement = jdbcCon.createStatement();
        rs = statement.executeQuery(sql);
        colCount = rs.getMetaData().getColumnCount();
    } catch (SQLException e) {
        throw new IOException(String.format(Locale.ROOT, "error while exec %s", sql), e);
    }
}
 
Example 9
Source File: JdbcHiveInputBase.java    From kylin-on-parquet-v2 with Apache License 2.0 4 votes vote down vote up
/**
 * Builds the job step that runs a Sqoop import moving the flat-table rows
 * from the JDBC source into Hive under {@code jobWorkingDir}.
 * <p>
 * The import is parallelized on a split column; when the model is partitioned
 * and the partition column(s) share the split column's table alias, the
 * boundary query is narrowed to the segment's date range.
 *
 * @param jobWorkingDir working directory that receives the imported data
 * @param cubeName      cube name (unused here; kept for subclass overrides)
 * @return a {@link CmdStep} wrapping the assembled sqoop command line
 */
protected AbstractExecutable createSqoopToFlatHiveStep(String jobWorkingDir, String cubeName) {
    KylinConfig config = getConfig();
    PartitionDesc partitionDesc = flatDesc.getDataModel().getPartitionDesc();
    String partCol = null;

    if (partitionDesc.isPartitioned()) {
        partCol = partitionDesc.getPartitionDateColumn();//tablename.colname
    }

    String splitTableAlias;
    String splitColumn;
    String splitDatabase;
    TblColRef splitColRef = determineSplitColumn();
    splitTableAlias = splitColRef.getTableAlias();

    splitColumn = getColumnIdentityQuoted(splitColRef, jdbcMetadataDialect, metaMap, true);
    splitDatabase = splitColRef.getColumnDesc().getTable().getDatabase();

    // The SELECT that produces the flat table; quotes must be escaped because the
    // SQL is embedded inside a double-quoted shell argument below.
    String selectSql = generateSelectDataStatementRDBMS(flatDesc, true, new String[] { partCol },
            jdbcMetadataDialect, metaMap);
    selectSql = escapeQuotationInSql(selectSql);

    String hiveTable = flatDesc.getTableName();
    String connectionUrl = config.getJdbcSourceConnectionUrl();
    String driverClass = config.getJdbcSourceDriver();
    String jdbcUser = config.getJdbcSourceUser();
    String jdbcPass = config.getJdbcSourcePass();
    String sqoopHome = config.getSqoopHome();
    String sqoopNullString = config.getSqoopNullString();
    String sqoopNullNonString = config.getSqoopNullNonString();
    String fieldDelimiter = config.getJdbcSourceFieldDelimiter();
    int mapperNum = config.getSqoopMapperNum();

    // Boundary query supplies sqoop with min/max of the split column.
    String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s ", splitColumn,
            splitColumn, getSchemaQuoted(metaMap, splitDatabase, jdbcMetadataDialect, true),
            getTableIdentityQuoted(splitColRef.getTableRef(), metaMap, jdbcMetadataDialect, true));
    if (partitionDesc.isPartitioned()) {
        SegmentRange segRange = flatDesc.getSegRange();
        if (segRange != null && !segRange.isInfinite()) {
            // Only narrow the boundary query when the partition column(s) live on the
            // split column's table alias; otherwise the WHERE clause would not apply
            // to this single-table query.
            if (partitionDesc.getPartitionDateColumnRef().getTableAlias().equals(splitTableAlias)
                    && (partitionDesc.getPartitionTimeColumnRef() == null || partitionDesc
                            .getPartitionTimeColumnRef().getTableAlias().equals(splitTableAlias))) {

                String quotedPartCond = partitionDesc.getPartitionConditionBuilder().buildDateRangeCondition(
                        partitionDesc, flatDesc.getSegment(), segRange,
                        col -> getTableColumnIdentityQuoted(col, jdbcMetadataDialect, metaMap, true));
                bquery += " WHERE " + quotedPartCond;
            }
        }
    }
    bquery = escapeQuotationInSql(bquery);

    // escape ` in cmd
    splitColumn = escapeQuotationInSql(splitColumn);

    String cmd = String.format(Locale.ROOT, "%s/bin/sqoop import" + generateSqoopConfigArgString()
            + "--connect \"%s\" --driver %s --username %s --password \"%s\" --query \"%s AND \\$CONDITIONS\" "
            + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '%s' "
            + "--null-non-string '%s' --fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl,
            driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery,
            sqoopNullString, sqoopNullNonString, fieldDelimiter, mapperNum);
    // Sqoop needs the raw password on its command line, but the debug log must not
    // leak it: mask the password before dumping the command.
    String loggedCmd = (jdbcPass == null || jdbcPass.isEmpty()) ? cmd : cmd.replace(jdbcPass, "******");
    logger.debug("sqoop cmd : {}", loggedCmd);
    CmdStep step = new CmdStep();
    step.setCmd(cmd);
    step.setName(ExecutableConstants.STEP_NAME_SQOOP_TO_FLAT_HIVE_TABLE);
    return step;
}
 
Example 10
Source File: JdbcHiveInputBase.java    From kylin with Apache License 2.0 4 votes vote down vote up
/**
 * Builds the job step that runs a Sqoop import moving the flat-table rows
 * from the JDBC source into Hive under {@code jobWorkingDir}.
 * <p>
 * The import is parallelized on a split column; when the model is partitioned
 * and the partition column(s) share the split column's table alias, the
 * boundary query is narrowed to the segment's date range.
 *
 * @param jobWorkingDir working directory that receives the imported data
 * @param cubeName      cube name (unused here; kept for subclass overrides)
 * @return a {@link CmdStep} wrapping the assembled sqoop command line
 */
protected AbstractExecutable createSqoopToFlatHiveStep(String jobWorkingDir, String cubeName) {
    KylinConfig config = getConfig();
    PartitionDesc partitionDesc = flatDesc.getDataModel().getPartitionDesc();
    String partCol = null;

    if (partitionDesc.isPartitioned()) {
        partCol = partitionDesc.getPartitionDateColumn();//tablename.colname
    }

    String splitTableAlias;
    String splitColumn;
    String splitDatabase;
    TblColRef splitColRef = determineSplitColumn();
    splitTableAlias = splitColRef.getTableAlias();

    splitColumn = getColumnIdentityQuoted(splitColRef, jdbcMetadataDialect, metaMap, true);
    splitDatabase = splitColRef.getColumnDesc().getTable().getDatabase();

    // The SELECT that produces the flat table; quotes must be escaped because the
    // SQL is embedded inside a double-quoted shell argument below.
    String selectSql = generateSelectDataStatementRDBMS(flatDesc, true, new String[] { partCol },
            jdbcMetadataDialect, metaMap);
    selectSql = escapeQuotationInSql(selectSql);

    String hiveTable = flatDesc.getTableName();
    String connectionUrl = config.getJdbcSourceConnectionUrl();
    String driverClass = config.getJdbcSourceDriver();
    String jdbcUser = config.getJdbcSourceUser();
    String jdbcPass = config.getJdbcSourcePass();
    String sqoopHome = config.getSqoopHome();
    String sqoopNullString = config.getSqoopNullString();
    String sqoopNullNonString = config.getSqoopNullNonString();
    String fieldDelimiter = config.getJdbcSourceFieldDelimiter();
    int mapperNum = config.getSqoopMapperNum();

    // Boundary query supplies sqoop with min/max of the split column.
    String bquery = String.format(Locale.ROOT, "SELECT min(%s), max(%s) FROM %s.%s ", splitColumn,
            splitColumn, getSchemaQuoted(metaMap, splitDatabase, jdbcMetadataDialect, true),
            getTableIdentityQuoted(splitColRef.getTableRef(), metaMap, jdbcMetadataDialect, true));
    if (partitionDesc.isPartitioned()) {
        SegmentRange segRange = flatDesc.getSegRange();
        if (segRange != null && !segRange.isInfinite()) {
            // Only narrow the boundary query when the partition column(s) live on the
            // split column's table alias; otherwise the WHERE clause would not apply
            // to this single-table query.
            if (partitionDesc.getPartitionDateColumnRef().getTableAlias().equals(splitTableAlias)
                    && (partitionDesc.getPartitionTimeColumnRef() == null || partitionDesc
                            .getPartitionTimeColumnRef().getTableAlias().equals(splitTableAlias))) {

                String quotedPartCond = partitionDesc.getPartitionConditionBuilder().buildDateRangeCondition(
                        partitionDesc, flatDesc.getSegment(), segRange,
                        col -> getTableColumnIdentityQuoted(col, jdbcMetadataDialect, metaMap, true));
                bquery += " WHERE " + quotedPartCond;
            }
        }
    }
    bquery = escapeQuotationInSql(bquery);

    // escape ` in cmd
    splitColumn = escapeQuotationInSql(splitColumn);

    String cmd = String.format(Locale.ROOT, "%s/bin/sqoop import" + generateSqoopConfigArgString()
            + "--connect \"%s\" --driver %s --username %s --password \"%s\" --query \"%s AND \\$CONDITIONS\" "
            + "--target-dir %s/%s --split-by %s --boundary-query \"%s\" --null-string '%s' "
            + "--null-non-string '%s' --fields-terminated-by '%s' --num-mappers %d", sqoopHome, connectionUrl,
            driverClass, jdbcUser, jdbcPass, selectSql, jobWorkingDir, hiveTable, splitColumn, bquery,
            sqoopNullString, sqoopNullNonString, fieldDelimiter, mapperNum);
    // Sqoop needs the raw password on its command line, but the debug log must not
    // leak it: mask the password before dumping the command.
    String loggedCmd = (jdbcPass == null || jdbcPass.isEmpty()) ? cmd : cmd.replace(jdbcPass, "******");
    logger.debug("sqoop cmd : {}", loggedCmd);
    CmdStep step = new CmdStep();
    step.setCmd(cmd);
    step.setName(ExecutableConstants.STEP_NAME_SQOOP_TO_FLAT_HIVE_TABLE);
    return step;
}