Java Code Examples for org.apache.hadoop.util.Shell#MAC

The following examples show how to use org.apache.hadoop.util.Shell#MAC. You can vote up the examples you find useful or vote down the ones you don't, and follow the links above each example to the original project or source file. You may also check out the related API usage on the sidebar.
Example 1
Source File: LocalWithSparkSessionTest.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
@BeforeClass
public static void beforeClass() {

    // On macOS, snappy's JNI library ships under a different name; point the
    // loader at it explicitly.
    if (Shell.MAC)
        System.setProperty("org.xerial.snappy.lib.name", "libsnappyjava.jnilib");//for snappy

    // Build a small local Spark context for the test suite; SparkConf.set
    // returns this, so the configuration can be expressed as one chain.
    SparkConf conf = new SparkConf()
            .setAppName(UUID.randomUUID().toString())
            .setMaster("local[4]");
    conf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
            .set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory")
            .set("spark.sql.shuffle.partitions", "1")
            .set("spark.memory.fraction", "0.1")
            // opt memory
            .set("spark.shuffle.detectCorrupt", "false")
            // For sinai_poc/query03, enable implicit cross join conversion
            .set("spark.sql.crossJoin.enabled", "true");
    sparkConf = conf;

    ss = SparkSession.builder().config(sparkConf).getOrCreate();
    KylinSparkEnv.setSparkSession(ss);
    UdfManager.create(ss);

    System.out.println("Check spark sql config [spark.sql.catalogImplementation = "
            + ss.conf().get("spark.sql.catalogImplementation") + "]");
}
 
Example 2
Source File: NFilePruningTest.java    From kylin-on-parquet-v2 with Apache License 2.0 6 votes vote down vote up
@BeforeClass
public static void beforeClass() {

    // On macOS, snappy's JNI library ships under a different name; point the
    // loader at it explicitly.
    if (Shell.MAC)
        System.setProperty("org.xerial.snappy.lib.name", "libsnappyjava.jnilib");//for snappy

    // Build a small local Spark context for the test suite; SparkConf.set
    // returns this, so the configuration can be expressed as one chain.
    SparkConf conf = new SparkConf()
            .setAppName(UUID.randomUUID().toString())
            .setMaster("local[4]");
    conf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
            .set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory")
            .set("spark.sql.shuffle.partitions", "1")
            .set("spark.memory.fraction", "0.1")
            // opt memory
            .set("spark.shuffle.detectCorrupt", "false")
            // For sinai_poc/query03, enable implicit cross join conversion
            .set("spark.sql.crossJoin.enabled", "true");
    sparkConf = conf;

    ss = SparkSession.builder().config(sparkConf).getOrCreate();
    KylinSparkEnv.setSparkSession(ss);
    UdfManager.create(ss);

    System.out.println("Check spark sql config [spark.sql.catalogImplementation = "
            + ss.conf().get("spark.sql.catalogImplementation") + "]");
}
 
Example 3
Source File: Stat.java    From hadoop with Apache License 2.0 6 votes vote down vote up
@Override
protected String[] getExecString() {
  // Follow symlinks only when dereference was requested; the flag letter is
  // appended to "-" below ("-Lc" vs "-c", "-Lf" vs "-f").
  final String derefFlag = dereference ? "-L" : "-";
  if (Shell.LINUX) {
    // GNU stat: custom format via -c.
    return new String[] {
        "stat", derefFlag + "c", "%s,%F,%Y,%X,%a,%U,%G,%N", path.toString() };
  }
  if (Shell.FREEBSD || Shell.MAC) {
    // BSD stat: custom format via -f, with different format specifiers.
    return new String[] {
        "stat", derefFlag + "f", "%z,%HT,%m,%a,%Op,%Su,%Sg,`link' -> `%Y'",
        path.toString() };
  }
  throw new UnsupportedOperationException(
      "stat is not supported on this platform");
}
 
Example 4
Source File: Stat.java    From big-c with Apache License 2.0 6 votes vote down vote up
@Override
protected String[] getExecString() {
  // Follow symlinks only when dereference was requested; the flag letter is
  // appended to "-" below ("-Lc" vs "-c", "-Lf" vs "-f").
  final String derefFlag = dereference ? "-L" : "-";
  if (Shell.LINUX) {
    // GNU stat: custom format via -c.
    return new String[] {
        "stat", derefFlag + "c", "%s,%F,%Y,%X,%a,%U,%G,%N", path.toString() };
  }
  if (Shell.FREEBSD || Shell.MAC) {
    // BSD stat: custom format via -f, with different format specifiers.
    return new String[] {
        "stat", derefFlag + "f", "%z,%HT,%m,%a,%Op,%Su,%Sg,`link' -> `%Y'",
        path.toString() };
  }
  throw new UnsupportedOperationException(
      "stat is not supported on this platform");
}
 
Example 5
Source File: DU.java    From hadoop-ozone with Apache License 2.0 5 votes vote down vote up
/**
 * Builds the {@code du} command line used to measure disk usage under
 * {@code getPath()}, honoring the exclusion pattern when one is set.
 *
 * @return the command and its arguments, ready to be executed
 */
private String[] constructCommand() {
  // ArrayList over LinkedList: this is an append-only build with a final
  // toArray, where ArrayList is the idiomatic (and faster) choice.
  List<String> parts = new ArrayList<>();
  parts.add("du");
  parts.add("-sk");
  if (excludePattern != null) {
    // BSD du (macOS) spells exclusion -I; GNU du spells it --exclude.
    parts.add(Shell.MAC ? "-I" : "--exclude");
    parts.add(excludePattern);
  }
  parts.add(getPath());
  return parts.toArray(new String[0]);
}
 
Example 6
Source File: OwnLocalResources.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Picks the local-resource tag matching the current operating system, or
 * returns {@code null} when the platform cannot be identified.
 */
public static String getThisLocalResourceTag(){
   /*
      Example os.version values:
      2.6.32-431.29.2.el6.x86_64
      3.10.0-514.6.1.el7.x86_64
   */
  final String osVersion = System.getProperty("os.version");
  if (osVersion == null) {
    return null;
  }

  if (Shell.LINUX) {
    // RHEL/CentOS kernels embed the distro generation ("el6"/"el7") in the
    // kernel release string.
    if (osVersion.contains("el6")) {
      return LR_TAG_CENTOS6;
    }
    if (osVersion.contains("el7")) {
      return LR_TAG_CENTOS7;
    }
  }

  if (Shell.MAC && osVersion.startsWith("10.")) {
    return LR_TAG_MACOS10;
  }

  return null;
}
 
Example 7
Source File: Stat.java    From hadoop with Apache License 2.0 5 votes vote down vote up
/**
 * Whether Stat is supported on the current platform.
 *
 * @return {@code true} when the native {@code stat} command is usable here
 *         (Linux, FreeBSD, or macOS); {@code false} otherwise
 */
public static boolean isAvailable() {
  // Return the platform check directly; if/return-true/return-false is noise.
  return Shell.LINUX || Shell.FREEBSD || Shell.MAC;
}
 
Example 8
Source File: Stat.java    From big-c with Apache License 2.0 5 votes vote down vote up
/**
 * Whether Stat is supported on the current platform.
 *
 * @return {@code true} when the native {@code stat} command is usable here
 *         (Linux, FreeBSD, or macOS); {@code false} otherwise
 */
public static boolean isAvailable() {
  // Return the platform check directly; if/return-true/return-false is noise.
  return Shell.LINUX || Shell.FREEBSD || Shell.MAC;
}