Java Code Examples for org.apache.hadoop.mapred.Counters#Counter

The following examples show how to use org.apache.hadoop.mapred.Counters#Counter. They are drawn from open-source projects; the source file, project, and license are noted above each example.
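Before the project examples, here is a minimal, self-contained sketch (not taken from any project below) of the most common way a Counters.Counter is touched in the old org.apache.hadoop.mapred API: the task asks its Reporter for a counter by group and name, then increments it. The group and counter names here are placeholders.

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class CountingMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, LongWritable> {

  @Override
  public void map(LongWritable key, Text value,
                  OutputCollector<Text, LongWritable> output,
                  Reporter reporter) throws IOException {
    // getCounter(group, name) resolves the named Counters.Counter; a live
    // task reporter creates it on first use (stub reporters may return null,
    // see Examples 10 and 18).
    reporter.getCounter("MyApp", "RECORDS_SEEN").increment(1);
    output.collect(value, new LongWritable(1));
  }
}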
Example 1
Source File: ExtractJobMetrics.java    From hiped2 with Apache License 2.0
public static String extractCounter(String counterFromHist,
                                    String... counterNames)
    throws ParseException {
  Counters counters =
      Counters.fromEscapedCompactString(counterFromHist);
  for (Counters.Group group : counters) {
    for (Counters.Counter counter : group) {
      for (String counterName : counterNames) {
        if (counterName.equals(counter.getName())) {
          return String.valueOf(counter.getCounter());
        }
      }
    }
  }
  return "";
}
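A hedged usage sketch for the helper above: the escaped compact string is the form in which Counters are serialized into job history files, and Counters.makeEscapedCompactString() produces it. The names are illustrative.

// Illustrative only: round-trip a counter through the compact-string form.
Counters counters = new Counters();
counters.findCounter("MyGroup", "MY_COUNTER").increment(42);
String compact = counters.makeEscapedCompactString();
// Yields "42"; extractCounter falls back to "" when no name matches.
String value = extractCounter(compact, "MY_COUNTER", "SOME_OTHER_NAME");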
 
Example 2
Source File: MergeManagerImpl.java    From big-c with Apache License 2.0
private void combineAndSpill(
    RawKeyValueIterator kvIter,
    Counters.Counter inCounter) throws IOException {
  JobConf job = jobConf;
  Reducer combiner = ReflectionUtils.newInstance(combinerClass, job);
  Class<K> keyClass = (Class<K>) job.getMapOutputKeyClass();
  Class<V> valClass = (Class<V>) job.getMapOutputValueClass();
  RawComparator<K> comparator = 
    (RawComparator<K>)job.getCombinerKeyGroupingComparator();
  try {
    CombineValuesIterator values = new CombineValuesIterator(
        kvIter, comparator, keyClass, valClass, job, Reporter.NULL,
        inCounter);
    while (values.more()) {
      combiner.reduce(values.getKey(), values, combineCollector,
                      Reporter.NULL);
      values.nextKey();
    }
  } finally {
    combiner.close();
  }
}
 
Example 3
Source File: XGBoostTrainUDTF.java    From incubator-hivemall with Apache License 2.0
@Nonnull
private static Booster train(@Nonnull final DMatrix dtrain, @Nonnegative final int round,
        @Nonnull final Map<String, Object> params, @Nullable final Reporter reporter)
        throws NoSuchMethodException, IllegalAccessException, InvocationTargetException,
        InstantiationException, XGBoostError {
    final Counters.Counter iterCounter = (reporter == null) ? null
            : reporter.getCounter("hivemall.XGBoostTrainUDTF$Counter", "iteration");

    final Booster booster = XGBoostUtils.createBooster(dtrain, params);
    for (int iter = 0; iter < round; iter++) {
        reportProgress(reporter);
        setCounterValue(iterCounter, iter + 1);

        booster.update(dtrain, iter);
    }
    return booster;
}
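The reportProgress and setCounterValue helpers called above are not part of this snippet; presumably they are null-safe wrappers along these lines (an assumption, not necessarily the Hivemall source):

// Assumed shape of the null-safe helpers referenced above.
private static void reportProgress(@Nullable final Reporter reporter) {
    if (reporter != null) {
        reporter.progress();
    }
}

private static void setCounterValue(@Nullable final Counters.Counter counter,
        final long value) {
    if (counter != null) {
        counter.setValue(value);
    }
}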
 
Example 4
Source File: ShuffleSchedulerImpl.java    From hadoop with Apache License 2.0
public ShuffleSchedulerImpl(JobConf job, TaskStatus status,
                        TaskAttemptID reduceId,
                        ExceptionReporter reporter,
                        Progress progress,
                        Counters.Counter shuffledMapsCounter,
                        Counters.Counter reduceShuffleBytes,
                        Counters.Counter failedShuffleCounter) {
  totalMaps = job.getNumMapTasks();
  abortFailureLimit = Math.max(30, totalMaps / 10);
  copyTimeTracker = new CopyTimeTracker();
  remainingMaps = totalMaps;
  finishedMaps = new boolean[remainingMaps];
  this.reporter = reporter;
  this.status = status;
  this.reduceId = reduceId;
  this.progress = progress;
  this.shuffledMapsCounter = shuffledMapsCounter;
  this.reduceShuffleBytes = reduceShuffleBytes;
  this.failedShuffleCounter = failedShuffleCounter;
  this.startTime = Time.monotonicNow();
  lastProgressTime = startTime;
  referee.start();
  this.maxFailedUniqueFetches = Math.min(totalMaps, 5);
  this.maxFetchFailuresBeforeReporting = job.getInt(
      MRJobConfig.SHUFFLE_FETCH_FAILURES, REPORT_FAILURE_LIMIT);
  this.reportReadErrorImmediately = job.getBoolean(
      MRJobConfig.SHUFFLE_NOTIFY_READERROR, true);

  this.maxDelay = job.getLong(MRJobConfig.MAX_SHUFFLE_FETCH_RETRY_DELAY,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_RETRY_DELAY);
  this.maxHostFailures = job.getInt(
      MRJobConfig.MAX_SHUFFLE_FETCH_HOST_FAILURES,
      MRJobConfig.DEFAULT_MAX_SHUFFLE_FETCH_HOST_FAILURES);
}
 
Example 5
Source File: OutputHandler.java    From big-c with Apache License 2.0
public void incrementCounter(int id, long amount) throws IOException {
  if (id < registeredCounters.size()) {
    Counters.Counter counter = registeredCounters.get(id);
    counter.increment(amount);
  } else {
    throw new IOException("Invalid counter with id: " + id);
  }
}
 
Example 6
Source File: MRTaskReporter.java    From incubator-tez with Apache License 2.0
public Counters.Counter getCounter(String group, String name) {
  TezCounter counter = context.getCounters().findCounter(group, name);
  MRCounters.MRCounter mrCounter = null;
  if (counter != null) {
    mrCounter = new MRCounters.MRCounter(counter);
  }
  return mrCounter;
}
 
Example 7
Source File: TestPipeApplication.java    From hadoop with Apache License 2.0
public Counters.Counter getCounter(String group, String name) {
  Counters.Counter counter = null;
  if (counters != null) {
    counter = counters.findCounter(group, name);
    if (counter == null) {
      Group grp = counters.addGroup(group, group);
      counter = grp.addCounter(name, name, 10);
    }
  }
  return counter;
}
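The fallback branch above uses the mutation API on the old Counters class directly. A standalone illustration of the same calls, with placeholder names:

// Illustrative only: create a group and counter eagerly, then read the value back.
Counters counters = new Counters();
Counters.Group grp = counters.addGroup("myGroup", "My Group");
Counters.Counter c = grp.addCounter("myCounter", "My Counter", 10); // initial value 10
c.increment(5);
long v = counters.findCounter("myGroup", "myCounter").getValue();   // 15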
 
Example 8
Source File: HadoopJob.java    From hadoop-gpu with Apache License 2.0
/**
 * Print this job's counters (for debugging purposes)
 */
void printCounters() {
  System.out.printf("New Job: %s\n", counters);
  for (String groupName : counters.getGroupNames()) {
    Counters.Group group = counters.getGroup(groupName);
    System.out.printf("\t%s[%s]\n", groupName, group.getDisplayName());

    for (Counters.Counter counter : group) {
      System.out.printf("\t\t%s: %s\n", counter.getDisplayName(),
                                        counter.getCounter());
    }
  }
  System.out.printf("\n");
}
 
Example 9
Source File: MapRedCounterLoader.java    From parquet-mr with Apache License 2.0
@Override
public ICounter getCounterByNameAndFlag(String groupName, String counterName, String counterFlag) {
  if (conf.getBoolean(counterFlag, true)) {
    Counters.Counter counter = reporter.getCounter(groupName, counterName);
    if (counter != null) {
      return new MapRedCounterAdapter(counter);
    }
  }
  return new BenchmarkCounter.NullCounter();
}
 
Example 10
Source File: dummyReporter.java    From iow-hadoop-streaming with Apache License 2.0
@Override
public Counters.Counter getCounter(String group, String name) {
    return null;
}
 
Example 11
Source File: HadoopV1Reporter.java    From ignite with Apache License 2.0
/** {@inheritDoc} */
@Override public Counters.Counter getCounter(Enum<?> name) {
    return getCounter(name.getDeclaringClass().getName(), name.name());
}
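This is the usual mapping from enum-based counters onto (group, name) pairs: the enum's declaring class name becomes the group and the constant's name becomes the counter name. A hypothetical enum for illustration:

// Hypothetical enum declared in package com.example: getCounter(MyCounters.RECORDS)
// resolves to group "com.example.MyCounters", counter name "RECORDS".
public enum MyCounters { RECORDS, BAD_RECORDS }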
 
Example 12
Source File: MergeManagerImpl.java    From hadoop with Apache License 2.0
public MergeManagerImpl(TaskAttemptID reduceId, JobConf jobConf, 
                    FileSystem localFS,
                    LocalDirAllocator localDirAllocator,  
                    Reporter reporter,
                    CompressionCodec codec,
                    Class<? extends Reducer> combinerClass,
                    CombineOutputCollector<K,V> combineCollector,
                    Counters.Counter spilledRecordsCounter,
                    Counters.Counter reduceCombineInputCounter,
                    Counters.Counter mergedMapOutputsCounter,
                    ExceptionReporter exceptionReporter,
                    Progress mergePhase, MapOutputFile mapOutputFile) {
  this.reduceId = reduceId;
  this.jobConf = jobConf;
  this.localDirAllocator = localDirAllocator;
  this.exceptionReporter = exceptionReporter;
  
  this.reporter = reporter;
  this.codec = codec;
  this.combinerClass = combinerClass;
  this.combineCollector = combineCollector;
  this.reduceCombineInputCounter = reduceCombineInputCounter;
  this.spilledRecordsCounter = spilledRecordsCounter;
  this.mergedMapOutputsCounter = mergedMapOutputsCounter;
  this.mapOutputFile = mapOutputFile;
  this.mapOutputFile.setConf(jobConf);
  
  this.localFS = localFS;
  this.rfs = ((LocalFileSystem)localFS).getRaw();
  
  final float maxInMemCopyUse =
    jobConf.getFloat(MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT,
        MRJobConfig.DEFAULT_SHUFFLE_INPUT_BUFFER_PERCENT);
  if (maxInMemCopyUse > 1.0 || maxInMemCopyUse < 0.0) {
    throw new IllegalArgumentException("Invalid value for " +
        MRJobConfig.SHUFFLE_INPUT_BUFFER_PERCENT + ": " +
        maxInMemCopyUse);
  }

  // Allow unit tests to fix Runtime memory
  this.memoryLimit = 
    (long)(jobConf.getLong(MRJobConfig.REDUCE_MEMORY_TOTAL_BYTES,
        Math.min(Runtime.getRuntime().maxMemory(), Integer.MAX_VALUE))
      * maxInMemCopyUse);
 
  this.ioSortFactor = jobConf.getInt(MRJobConfig.IO_SORT_FACTOR, 100);

  final float singleShuffleMemoryLimitPercent =
      jobConf.getFloat(MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT,
          DEFAULT_SHUFFLE_MEMORY_LIMIT_PERCENT);
  if (singleShuffleMemoryLimitPercent <= 0.0f
      || singleShuffleMemoryLimitPercent > 1.0f) {
    throw new IllegalArgumentException("Invalid value for "
        + MRJobConfig.SHUFFLE_MEMORY_LIMIT_PERCENT + ": "
        + singleShuffleMemoryLimitPercent);
  }

  usedMemory = 0L;
  commitMemory = 0L;
  this.maxSingleShuffleLimit = 
    (long)(memoryLimit * singleShuffleMemoryLimitPercent);
  this.memToMemMergeOutputsThreshold = 
          jobConf.getInt(MRJobConfig.REDUCE_MEMTOMEM_THRESHOLD, ioSortFactor);
  this.mergeThreshold = (long)(this.memoryLimit * 
                        jobConf.getFloat(MRJobConfig.SHUFFLE_MERGE_PERCENT, 
                                         0.90f));
  LOG.info("MergerManager: memoryLimit=" + memoryLimit + ", " +
           "maxSingleShuffleLimit=" + maxSingleShuffleLimit + ", " +
           "mergeThreshold=" + mergeThreshold + ", " + 
           "ioSortFactor=" + ioSortFactor + ", " +
           "memToMemMergeOutputsThreshold=" + memToMemMergeOutputsThreshold);

  if (this.maxSingleShuffleLimit >= this.mergeThreshold) {
    throw new RuntimeException("Invalid configuration: "
        + "maxSingleShuffleLimit should be less than mergeThreshold"
        + "maxSingleShuffleLimit: " + this.maxSingleShuffleLimit
        + "mergeThreshold: " + this.mergeThreshold);
  }

  boolean allowMemToMemMerge = 
    jobConf.getBoolean(MRJobConfig.REDUCE_MEMTOMEM_ENABLED, false);
  if (allowMemToMemMerge) {
    this.memToMemMerger = 
      new IntermediateMemoryToMemoryMerger(this,
                                           memToMemMergeOutputsThreshold);
    this.memToMemMerger.start();
  } else {
    this.memToMemMerger = null;
  }
  
  this.inMemoryMerger = createInMemoryMerger();
  this.inMemoryMerger.start();
  
  this.onDiskMerger = new OnDiskMerger(this);
  this.onDiskMerger.start();
  
  this.mergePhase = mergePhase;
}
 
Example 13
Source File: OutputHandler.java    From RDFS with Apache License 2.0
public void registerCounter(int id, String group, String name) throws IOException {
  Counters.Counter counter = reporter.getCounter(group, name);
  registeredCounters.put(id, counter);
}
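Together with Example 5's incrementCounter, this shows the two-step counter protocol of the pipes OutputHandler: a counter is first registered under a small integer id, and later updates refer to it by id only. A hypothetical call sequence (handler is an OutputHandler instance):

// Hypothetical driver for the two methods (see also Example 5).
handler.registerCounter(0, "MyGroup", "BYTES_SEEN"); // bind id 0 to a Counters.Counter
handler.incrementCounter(0, 1024L);                  // bump it by id
// An unregistered id makes incrementCounter throw an IOException (Example 5).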
 
Example 14
Source File: TestPipeApplication.java    From hadoop with Apache License 2.0
/**
 * test PipesMapRunner: verify the transfer of data from the reader
 *
 * @throws Exception
 */
@Test
public void testRunner() throws Exception {

  // clean old password files
  File[] psw = cleanTokenPasswordFile();
  try {
    RecordReader<FloatWritable, NullWritable> rReader = new ReaderPipesMapRunner();
    JobConf conf = new JobConf();
    conf.set(Submitter.IS_JAVA_RR, "true");
    // for stdout and stderr

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);

    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace + File.separator + "outfile")), IntWritable.class,
            Text.class, null, null, true);
    output.setWriter(wr);
    // stub for client
    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationRunnableStub");

    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());
    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token,  conf.getCredentials());
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    TestTaskReporter reporter = new TestTaskReporter();
    PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text> runner = new PipesMapRunner<FloatWritable, NullWritable, IntWritable, Text>();

    initStdOut(conf);

    runner.configure(conf);
    runner.run(rReader, output, reporter);

    String stdOut = readStdOut(conf);

    // verify part of the translated data via the client's stdout,
    // which is shared between the client and the test
    // check version
    assertTrue(stdOut.contains("CURRENT_PROTOCOL_VERSION:0"));
    // check key and value classes
    assertTrue(stdOut
            .contains("Key class:org.apache.hadoop.io.FloatWritable"));
    assertTrue(stdOut
            .contains("Value class:org.apache.hadoop.io.NullWritable"));
    // verify that all data from the reader was sent
    assertTrue(stdOut.contains("value:0.0"));
    assertTrue(stdOut.contains("value:9.0"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }
}
 
Example 15
Source File: JobStatistics.java    From RDFS with Apache License 2.0
private void parseAndAddReduceTaskCounters(ReduceTaskStatistics reduceTask, String counters) throws ParseException {
  Counters cnt = Counters.fromEscapedCompactString(counters);
  for (Counters.Group grp : cnt) {
    for (Counters.Counter counter : grp) {
      String countername = grp.getDisplayName() + "." + counter.getDisplayName();
      String value = String.valueOf(counter.getValue());
      String[] parts = {countername, value};
      if (parts[0].equals("FileSystemCounters.FILE_BYTES_READ")) {
        reduceTask.setValue(ReduceTaskKeys.FILE_BYTES_READ, parts[1]);
      } else if (parts[0].equals("FileSystemCounters.FILE_BYTES_WRITTEN")) {
        reduceTask.setValue(ReduceTaskKeys.FILE_BYTES_WRITTEN, parts[1]);
      } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_READ")) {
        reduceTask.setValue(ReduceTaskKeys.HDFS_BYTES_READ, parts[1]);
      } else if (parts[0].equals("FileSystemCounters.HDFS_BYTES_WRITTEN")) {
        reduceTask.setValue(ReduceTaskKeys.HDFS_BYTES_WRITTEN, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Reduce input records")) {
        reduceTask.setValue(ReduceTaskKeys.INPUT_RECORDS, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Reduce output records")) {
        reduceTask.setValue(ReduceTaskKeys.OUTPUT_RECORDS, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Combine input records")) {
        reduceTask.setValue(ReduceTaskKeys.COMBINE_INPUT_RECORDS, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Combine output records")) {
        reduceTask.setValue(ReduceTaskKeys.COMBINE_OUTPUT_RECORDS, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Reduce input groups")) {
        reduceTask.setValue(ReduceTaskKeys.INPUT_GROUPS, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Spilled Records")) {
        reduceTask.setValue(ReduceTaskKeys.SPILLED_RECORDS, parts[1]);
      } else if (parts[0].equals("Map-Reduce Framework.Reduce shuffle bytes")) {
        reduceTask.setValue(ReduceTaskKeys.SHUFFLE_BYTES, parts[1]);
      } else {
        System.err.println("ReduceCounterKey:<"+parts[0]+"> ==> NOT INCLUDED IN PERFORMANCE ADVISOR REDUCE TASK");
      }
    }
  }    
}
 
Example 16
Source File: TestPipeApplication.java    From big-c with Apache License 2.0
/**
 * test org.apache.hadoop.mapred.pipes.Application
 * test internal functions: MessageType.REGISTER_COUNTER, INCREMENT_COUNTER, STATUS, PROGRESS...
 *
 * @throws Throwable
 */
@Test
public void testApplication() throws Throwable {
  JobConf conf = new JobConf();

  RecordReader<FloatWritable, NullWritable> rReader = new Reader();

  // client for test
  File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeApplicationStub");

  TestTaskReporter reporter = new TestTaskReporter();

  File[] psw = cleanTokenPasswordFile();
  try {

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());

    // token for authorization
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));

    TokenCache.setJobToken(token, conf.getCredentials());
    FakeCollector output = new FakeCollector(new Counters.Counter(),
            new Progress());
    FileSystem fs = new RawLocalFileSystem();
    fs.setConf(conf);
    Writer<IntWritable, Text> wr = new Writer<IntWritable, Text>(conf, fs.create(
            new Path(workSpace.getAbsolutePath() + File.separator + "outfile")),
            IntWritable.class, Text.class, null, null, true);
    output.setWriter(wr);
    conf.set(Submitter.PRESERVE_COMMANDFILE, "true");

    initStdOut(conf);

    Application<WritableComparable<IntWritable>, Writable, IntWritable, Text> application = new Application<WritableComparable<IntWritable>, Writable, IntWritable, Text>(
            conf, rReader, output, reporter, IntWritable.class, Text.class);
    application.getDownlink().flush();

    application.getDownlink().mapItem(new IntWritable(3), new Text("txt"));

    application.getDownlink().flush();

    application.waitForFinish();

    wr.close();

    // test getDownlink().mapItem();
    String stdOut = readStdOut(conf);
    assertTrue(stdOut.contains("key:3"));
    assertTrue(stdOut.contains("value:txt"));

    // the reporter's counter and status should have been sent
    // test MessageType.REGISTER_COUNTER and INCREMENT_COUNTER
    assertEquals(1.0, reporter.getProgress(), 0.01);
    assertNotNull(reporter.getCounter("group", "name"));
    // test status MessageType.STATUS
    assertEquals(reporter.getStatus(), "PROGRESS");
    stdOut = readFile(new File(workSpace.getAbsolutePath() + File.separator
            + "outfile"));
    // check MessageType.PROGRESS
    assertEquals(0.55f, rReader.getProgress(), 0.001);
    application.getDownlink().close();
    // test MessageType.OUTPUT
    Entry<IntWritable, Text> entry = output.getCollect().entrySet()
            .iterator().next();
    assertEquals(123, entry.getKey().get());
    assertEquals("value", entry.getValue().toString());
    try {
      // try to abort
      application.abort(new Throwable());
      fail();
    } catch (IOException e) {
      // abort fails with the expected message
      assertEquals("pipe child exception", e.getMessage());
    }
  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }
}
 
Example 17
Source File: TestPipeApplication.java    From big-c with Apache License 2.0
public CombineOutputCollector(Counters.Counter outCounter,
                              Progressable progressable) {
  this.outCounter = outCounter;
  this.progressable = progressable;
}
 
Example 18
Source File: dummyReporter.java    From iow-hadoop-streaming with Apache License 2.0
@Override
public Counters.Counter getCounter(Enum<?> name) {
    return null;
}
 
Example 19
Source File: HadoopV1Counter.java    From ignite with Apache License 2.0
/** {@inheritDoc} */
@SuppressWarnings("deprecation")
@Override public boolean contentEquals(Counters.Counter cntr) {
    return getUnderlyingCounter().equals(cntr.getUnderlyingCounter());
}
 
Example 20
Source File: TestPipeApplication.java    From hadoop with Apache License 2.0
/**
 * test org.apache.hadoop.mapred.pipes.PipesReducer
 * test the transfer of data: key and value
 *
 * @throws Exception
 */
@Test
public void testPipesReducer() throws Exception {

  File[] psw = cleanTokenPasswordFile();
  JobConf conf = new JobConf();
  try {
    Token<AMRMTokenIdentifier> token = new Token<AMRMTokenIdentifier>(
            "user".getBytes(), "password".getBytes(), new Text("kind"), new Text(
            "service"));
    TokenCache.setJobToken(token, conf.getCredentials());

    File fCommand = getFileCommand("org.apache.hadoop.mapred.pipes.PipeReducerStub");
    conf.set(MRJobConfig.CACHE_LOCALFILES, fCommand.getAbsolutePath());

    PipesReducer<BooleanWritable, Text, IntWritable, Text> reducer = new PipesReducer<BooleanWritable, Text, IntWritable, Text>();
    reducer.configure(conf);
    BooleanWritable bw = new BooleanWritable(true);

    conf.set(MRJobConfig.TASK_ATTEMPT_ID, taskName);
    initStdOut(conf);
    conf.setBoolean(MRJobConfig.SKIP_RECORDS, true);
    CombineOutputCollector<IntWritable, Text> output = new CombineOutputCollector<IntWritable, Text>(
            new Counters.Counter(), new Progress());
    Reporter reporter = new TestTaskReporter();
    List<Text> texts = new ArrayList<Text>();
    texts.add(new Text("first"));
    texts.add(new Text("second"));
    texts.add(new Text("third"));

    reducer.reduce(bw, texts.iterator(), output, reporter);
    reducer.close();
    String stdOut = readStdOut(conf);
    // test data: key
    assertTrue(stdOut.contains("reducer key :true"));
    // and values
    assertTrue(stdOut.contains("reduce value  :first"));
    assertTrue(stdOut.contains("reduce value  :second"));
    assertTrue(stdOut.contains("reduce value  :third"));

  } finally {
    if (psw != null) {
      // remove password files
      for (File file : psw) {
        file.deleteOnExit();
      }
    }
  }
}