Java Code Examples for org.apache.kylin.common.KylinConfig#writeProperties()

The following examples show how to use org.apache.kylin.common.KylinConfig#writeProperties(). You can vote up the examples you find helpful or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source File: AbstractHadoopJob.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Dumps all metadata the job needs for the given cube (kylin.properties, the cube,
 * its model and descriptor, table descriptors, and per-segment dictionaries) into a
 * local temp directory, then registers that directory with the Hadoop distributed
 * cache through the "tmpfiles" configuration key.
 *
 * @param cube the cube whose metadata should be shipped with the job
 * @param conf the Hadoop job configuration that receives the "tmpfiles" entry
 * @throws IOException if the temp metadata directory cannot be created or written
 */
protected void attachKylinPropsAndMetadata(CubeInstance cube, Configuration conf) throws IOException {
    File tmp = File.createTempFile("kylin_job_meta", "");
    // createTempFile reserves a unique path; we need a directory there, so remove the file
    if (!tmp.delete()) {
        throw new IOException("Failed to delete temp file " + tmp.getAbsolutePath());
    }

    File metaDir = new File(tmp, "meta");
    if (!metaDir.mkdirs()) {
        throw new IOException("Failed to create metadata directory " + metaDir.getAbsolutePath());
    }
    // best-effort cleanup of the temp tree when the JVM exits
    metaDir.getParentFile().deleteOnExit();

    // write kylin.properties so the job side sees the same configuration as the submitter
    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    File kylinPropsFile = new File(metaDir, "kylin.properties");
    kylinConfig.writeProperties(kylinPropsFile);

    // collect resource paths: cube / model_desc / cube_desc / table descriptors
    ArrayList<String> dumpList = new ArrayList<String>();
    dumpList.add(cube.getResourcePath());
    dumpList.add(cube.getDescriptor().getModel().getResourcePath());
    dumpList.add(cube.getDescriptor().getResourcePath());
    for (String tableName : cube.getDescriptor().getModel().getAllTables()) {
        TableDesc table = MetadataManager.getInstance(kylinConfig).getTableDesc(tableName);
        if (table == null) {
            // fail fast with a named error instead of an anonymous NPE on getResourcePath()
            throw new IllegalStateException("Table descriptor not found for table " + tableName);
        }
        dumpList.add(table.getResourcePath());
    }

    // each segment may carry its own dictionary files
    for (CubeSegment segment : cube.getSegments()) {
        dumpList.addAll(segment.getDictionaryPaths());
    }

    dumpResources(kylinConfig, metaDir, dumpList);

    // ship the whole meta directory to the cluster via the hadoop distributed cache
    conf.set("tmpfiles", "file:///" + OptionsHelper.convertToFileURL(metaDir.getAbsolutePath()));
}
 
Example 2
Source File: AbstractHadoopJob.java    From Kylin with Apache License 2.0 5 votes vote down vote up
/**
 * Dumps all metadata the job needs for the given inverted index (kylin.properties,
 * the II instance, its model and descriptor, table descriptors, and per-segment
 * dictionaries) into a local temp directory, then registers that directory with the
 * Hadoop distributed cache through the "tmpfiles" configuration key.
 *
 * @param ii   the inverted-index instance whose metadata should be shipped with the job
 * @param conf the Hadoop job configuration that receives the "tmpfiles" entry
 * @throws IOException if the temp metadata directory cannot be created or written
 */
protected void attachKylinPropsAndMetadata(IIInstance ii, Configuration conf) throws IOException {
    File tmp = File.createTempFile("kylin_job_meta", "");
    // createTempFile reserves a unique path; we need a directory there, so remove the file
    if (!tmp.delete()) {
        throw new IOException("Failed to delete temp file " + tmp.getAbsolutePath());
    }

    File metaDir = new File(tmp, "meta");
    if (!metaDir.mkdirs()) {
        throw new IOException("Failed to create metadata directory " + metaDir.getAbsolutePath());
    }
    // best-effort cleanup of the temp tree when the JVM exits
    metaDir.getParentFile().deleteOnExit();

    // write kylin.properties so the job side sees the same configuration as the submitter
    KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
    File kylinPropsFile = new File(metaDir, "kylin.properties");
    kylinConfig.writeProperties(kylinPropsFile);

    // collect resource paths: II / model_desc / II_desc / table descriptors
    ArrayList<String> dumpList = new ArrayList<String>();
    dumpList.add(ii.getResourcePath());
    dumpList.add(ii.getDescriptor().getModel().getResourcePath());
    dumpList.add(ii.getDescriptor().getResourcePath());

    for (String tableName : ii.getDescriptor().getModel().getAllTables()) {
        TableDesc table = MetadataManager.getInstance(kylinConfig).getTableDesc(tableName);
        if (table == null) {
            // fail fast with a named error instead of an anonymous NPE on getResourcePath()
            throw new IllegalStateException("Table descriptor not found for table " + tableName);
        }
        dumpList.add(table.getResourcePath());
    }

    // each segment may carry its own dictionary files
    for (IISegment segment : ii.getSegments()) {
        dumpList.addAll(segment.getDictionaryPaths());
    }

    dumpResources(kylinConfig, metaDir, dumpList);

    // ship the whole meta directory to the cluster via the hadoop distributed cache
    conf.set("tmpfiles", "file:///" + OptionsHelper.convertToFileURL(metaDir.getAbsolutePath()));
}