org.apache.spark.deploy.yarn.Client Java Examples

The following examples show how to use org.apache.spark.deploy.yarn.Client, Spark's YARN submission client. Each example is taken from an open-source project; the source file and license are noted above each snippet.
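All of the examples below follow the same basic pattern: build a ClientArguments object, pair it with a SparkConf (and, in older Spark versions, a Hadoop Configuration), and hand both to Client to submit the application to YARN. A minimal sketch of that pattern, assuming the Spark 2.x two-argument constructor used in Example #2 (the exact signatures vary across Spark versions):

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.spark.SparkConf;
import org.apache.spark.deploy.yarn.Client;
import org.apache.spark.deploy.yarn.ClientArguments;

public class YarnSubmitSketch {
    public static ApplicationId submit(String appJar, String mainClass) {
        SparkConf sparkConf = new SparkConf()
                .setMaster("yarn")
                .setAppName("yarn-submit-sketch");
        sparkConf.set("spark.submit.deployMode", "cluster");

        // --jar and --class are among the flags ClientArguments parses
        String[] args = new String[] {"--jar", appJar, "--class", mainClass};
        ClientArguments clientArguments = new ClientArguments(args);

        // Two-arg constructor as in Example #2 (Spark 2.x); older releases
        // also take a Hadoop Configuration (see Examples #3 and #4)
        Client client = new Client(clientArguments, sparkConf);
        return client.submitApplication();
    }
}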
Example #1
Source File: SparkAppLauncher.java    From sylph with Apache License 2.0
public ApplicationId run(Job job)
        throws Exception
{
    SparkJobConfig jobConfig = job.getConfig();

    System.setProperty("SPARK_YARN_MODE", "true");
    SparkConf sparkConf = new SparkConf();
    sparkConf.set("spark.driver.extraJavaOptions", "-XX:PermSize=64M -XX:MaxPermSize=128M");
    sparkConf.set("spark.yarn.stagingDir", appHome);
    // Executor resources
    sparkConf.set("spark.executor.instances", jobConfig.getNumExecutors() + "");
    sparkConf.set("spark.executor.memory", jobConfig.getExecutorMemory());
    sparkConf.set("spark.executor.cores", jobConfig.getExecutorCores() + "");

    // Driver resources
    sparkConf.set("spark.driver.cores", jobConfig.getDriverCores() + "");
    sparkConf.set("spark.driver.memory", jobConfig.getDriverMemory());

    sparkConf.setSparkHome(sparkHome);

    sparkConf.setMaster("yarn");
    sparkConf.setAppName(job.getName());

    sparkConf.set("spark.submit.deployMode", "cluster"); // worked
    //set Depends set spark.yarn.dist.jars and spark.yarn.dist.files
    setDistJars(job, sparkConf);

    String[] args = getArgs();
    ClientArguments clientArguments = new ClientArguments(args);   // spark-2.0.0
    // Optionally mirror the YARN client configuration into SparkConf:
    //yarnClient.getConfig().iterator().forEachRemaining(x -> sparkConf.set("spark.hadoop." + x.getKey(), x.getValue()));

    Client appClient = new SylphSparkYarnClient(clientArguments, sparkConf, yarnClient, jobConfig.getQueue());
    return appClient.submitApplication();
}
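The ApplicationId returned by submitApplication() can then be used to track the job. A minimal sketch of polling the application state with Hadoop's YarnClient (a hypothetical helper; the yarnClient field in the example above is assumed to be such an instance, already initialized against the cluster):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;

// Hypothetical helper: poll YARN until the application leaves its pending states
static YarnApplicationState waitUntilRunning(Configuration conf, ApplicationId appId) throws Exception {
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);
    yarnClient.start();
    try {
        while (true) {
            ApplicationReport report = yarnClient.getApplicationReport(appId);
            YarnApplicationState state = report.getYarnApplicationState();
            if (state != YarnApplicationState.NEW
                    && state != YarnApplicationState.NEW_SAVING
                    && state != YarnApplicationState.SUBMITTED
                    && state != YarnApplicationState.ACCEPTED) {
                return state; // RUNNING, FINISHED, FAILED, or KILLED
            }
            Thread.sleep(1000L);
        }
    } finally {
        yarnClient.stop();
    }
}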
 
Example #2
Source File: SparkOnYarnContainer.java    From liteflow with Apache License 2.0
@Override
public void run() throws Exception {

    ExecutorJob executorJob = this.getExecutorJob();

    String config = executorJob.getConfig();
    // Fall back to an empty object so the put() calls below cannot dereference null
    JSONObject configObj = new JSONObject();
    if (StringUtils.isNotBlank(config)) {
        configObj = JSONObject.parseObject(config);
    }

    Tuple<String, String> configTuple = this.generateConfigFile(JSONUtils.toJSONStringWithoutCircleDetect(configObj));
    String configName = configTuple.getA();
    String configPath = configTuple.getB();

    configObj.put(Constants.CONFIG_FILE_NAME, configName);
    configObj.put(Constants.JOB_CONFIG_PATH, configPath);
    /**
     * Initialize the Spark conf
     */
    this.sparkConf = initSparkConf(configObj);

    /**
     * Build the client arguments
     */
    ClientArguments clientArgs = getArgs(configObj);
    /**
     * Submit to YARN
     */
    Client client = new Client(clientArgs, this.sparkConf);
    ApplicationId applicationId = client.submitApplication();
    String appId = applicationId.toString();
    logger.info("{} get yarn applicationId:{}", executorJob.getId(), appId);
    ExecutorJobService executorJobService = ExecutorServiceUtils.getExecutorJobService();
    /**
     * Set the running status
     */
    this.setStatus(ContainerStatus.RUNNING);
    executorJob.setApplicationId(appId);
    executorJobService.bindApplicationIdAndRun(executorJob.getId(), appId);
}
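The getArgs(configObj) helper above is project-specific and not shown. Based on the flags ClientArguments actually parses (compare Example #3 below), a hypothetical reconstruction might look like the following; every JSON key used here is an assumption, not liteflow's real schema:

import java.util.ArrayList;
import java.util.List;
import com.alibaba.fastjson.JSONObject;
import org.apache.spark.deploy.yarn.ClientArguments;

// Hypothetical getArgs-style helper; the JSON key names are illustrative only
static ClientArguments getArgs(JSONObject configObj) {
    List<String> args = new ArrayList<>();
    args.add("--jar");
    args.add(configObj.getString("appJar"));      // assumed key
    args.add("--class");
    args.add(configObj.getString("mainClass"));   // assumed key
    // Each application argument is passed with its own --arg flag
    args.add("--arg");
    args.add(configObj.getString(Constants.JOB_CONFIG_PATH)); // constant set in the example above
    return new ClientArguments(args.toArray(new String[0]));
}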
 
Example #3
Source File: SparkYarnTaskConfiguration.java    From spring-cloud-task-app-starters with Apache License 2.0
@Override
public void run(String... args) throws Exception {
    SparkConf sparkConf = new SparkConf();
    sparkConf.set("spark.yarn.jar", config.getAssemblyJar());

    List<String> submitArgs = new ArrayList<String>();
    if (StringUtils.hasText(config.getAppName())) {
        submitArgs.add("--name");
        submitArgs.add(config.getAppName());
    }
    submitArgs.add("--jar");
    submitArgs.add(config.getAppJar());
    submitArgs.add("--class");
    submitArgs.add(config.getAppClass());
    if (StringUtils.hasText(config.getResourceFiles())) {
        submitArgs.add("--files");
        submitArgs.add(config.getResourceFiles());
    }
    if (StringUtils.hasText(config.getResourceArchives())) {
        submitArgs.add("--archives");
        submitArgs.add(config.getResourceArchives());
    }
    submitArgs.add("--executor-memory");
    submitArgs.add(config.getExecutorMemory());
    submitArgs.add("--num-executors");
    submitArgs.add("" + config.getNumExecutors());
    for (String arg : config.getAppArgs()) {
        submitArgs.add("--arg");
        submitArgs.add(arg);
    }
    logger.info("Submit App with args: " + Arrays.asList(submitArgs));
    ClientArguments clientArguments =
            new ClientArguments(submitArgs.toArray(new String[submitArgs.size()]), sparkConf);
    clientArguments.isClusterMode();
    Client client = new Client(clientArguments, hadoopConfiguration, sparkConf);
    System.setProperty("SPARK_YARN_MODE", "true");
    try {
        client.run();
    } catch (Throwable t) {
        logger.error("Spark Application failed: " + t.getMessage(), t);
        throw new RuntimeException("Spark Application failed", t);
    }
}
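Note that unlike submitApplication() in Examples #1 and #2, client.run() blocks and monitors the application, throwing once YARN reports a failed or killed final state. For fire-and-forget submission in YARN cluster mode there is a documented switch; a one-line sketch:

// When false, run() returns once YARN accepts the application
// instead of waiting for it to complete
sparkConf.set("spark.yarn.submit.waitAppCompletion", "false");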
 
Example #4
Source File: SparkYarnSubmitter.java    From sqoop-on-spark with Apache License 2.0
public static void submit(String[] args, SparkConf sparkConf) {
  YarnConfiguration yarnConfig = new YarnConfiguration();
  // YarnConfiguration picks up the *-site.xml files (core-site.xml, yarn-site.xml) from the classpath
  ClientArguments cArgs = new ClientArguments(args, sparkConf);
  new Client(cArgs, yarnConfig, sparkConf).run();
}
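A new YarnConfiguration() only finds the right cluster if core-site.xml and yarn-site.xml are on the classpath (typically by adding $HADOOP_CONF_DIR to it); otherwise it falls back to Hadoop's defaults. A small sketch for checking and overriding the ResourceManager address; the host shown is illustrative:

import org.apache.hadoop.yarn.conf.YarnConfiguration;

YarnConfiguration yarnConfig = new YarnConfiguration();
// See what the classpath *-site.xml files resolved to
System.out.println(yarnConfig.get(YarnConfiguration.RM_ADDRESS));
// Override explicitly when no yarn-site.xml is available (host:port is an assumption)
yarnConfig.set(YarnConfiguration.RM_ADDRESS, "rm.example.com:8032");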