package org.apache.hadoop.hdfs;
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Date;
import java.util.Random;

import org.apache.hadoop.mapred.GenMapper;
import org.apache.hadoop.mapred.GenReduce;
import org.apache.hadoop.mapred.GenThread;
import org.apache.hadoop.mapred.GenReaderThread;
import org.apache.hadoop.mapred.GenWriterThread;
import org.apache.hadoop.mapred.DatanodeBenThread;
import org.apache.hadoop.mapred.DatanodeBenThread.RUNNING_TYPE;
import org.apache.hadoop.mapred.DatanodeBenThread.DatanodeBenRunTimeConstants;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import java.text.SimpleDateFormat;
import java.text.DateFormat;

@SuppressWarnings("deprecation")
public class DFSGeneralTest extends Configured implements Tool, GeneralConstant{

  // Hadoop configuration shared across the test run.
  private static Configuration fsConfig;
  // Number of map tasks, one per machine (-nMaps flag).
  private long nmaps;
  // Number of worker threads per map task (-nThreads flag).
  private long nthreads;
  // I/O buffer size, settable via -buffersize; default from GenThread.
  private int buffersize = GenThread.DEFAULT_BUFFER_SIZE;
  // Write throttle (KB allowed per second, per the usage text), -writerate flag.
  private long datarate = GenThread.DEFAULT_DATA_RATE;
  // The sub-test types this driver can dispatch to.
  static final String[] testtypes = {GenWriterThread.TEST_TYPE, 
                                     DatanodeBenThread.TEST_TYPE};
  // Test type chosen on the command line; null until parsed.
  private static String testtype = null;
  // printf template for all usage banners: first %s = test type name,
  // second %s = the test-specific extra arguments.
  private final static String DEFAULT_USAGE = 
      "USAGE: bin/hadoop hadoop-*-benchmark.jar " + 
      "gentest %s [-nMaps] [-nThreads] [-buffersize] [-workdir] " +
      "[-writerate] [-cleanup] %s\n";
  // DFS paths for the MapReduce job's input/output and the work directory
  // (-workdir flag); populated during setup.
  private String dfs_output = null;
  private String dfs_input = null;
  private String input = null;
  private String output = null;
  private String workdir = null;
  // When true (-cleanup), delete all temp data once the test is done.
  private boolean cleanup = false;
  private Random rb = new Random();
  // NOTE(review): SimpleDateFormat is not thread-safe; this shared static
  // instance is only safe if uniqueId initialization never runs on multiple
  // threads concurrently -- confirm before reusing elsewhere.
  private static final DateFormat dateFormat =
      new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS");

  // Per-instance run identifier (timestamp + random int) used to keep
  // concurrent runs distinct.
  private String uniqueId = (dateFormat.format(new Date())) + "." 
      + rb.nextInt();

  /**
   * Prints the top-level usage banner for the gentest tool to stderr,
   * followed by the per-test-type help sections, then terminates the
   * JVM with exit status 1.
   */
  public static void printUsage() {
    System.err.printf(DEFAULT_USAGE, "testtype", "<args...>");
    System.err.print("    testtype could be ");
    for (int i = 0; i < testtypes.length; i++) {
      System.err.print("\"" + testtypes[i] + "\" ");
    }
    System.err.println();
    String mapsHelp = "    -nMaps [number of machines] Default value = " + NMAPS;
    System.err.println(mapsHelp);
    String threadsHelp = "    -nThreads [number of threads in one machine] Default "
        + "value = " + NTHREADS;
    System.err.println(threadsHelp);
    String bufferHelp = "    -buffersize [X KB buffer] default value = "
        + GenThread.DEFAULT_BUFFER_SIZE;
    System.err.println(bufferHelp);
    String workdirHelp = "    -workdir [working directory] default value = "
        + INPUT + "[testtype]";
    System.err.println(workdirHelp);
    String rateHelp = "    -writerate [X KB data allowed to write per "
        + "second] default value = " + GenThread.DEFAULT_DATA_RATE;
    System.err.println(rateHelp);
    System.err.println("    -cleanup :delete all temp data when test is done.");
    System.err.println();
    // Append the detailed help block for each supported test type.
    for (int i = 0; i < testtypes.length; i++) {
      System.err.println("Test " + testtypes[i] + ":");
      printUsage(testtypes[i], false);
    }
    System.exit(1);
  }
  
  public static void printUsage(String testtype, boolean exitAfterPrint) {
    if (testtype.equals(GenWriterThread.TEST_TYPE)) {
      System.err.printf(DEFAULT_USAGE, testtype, "[-sync] [-roll] " 
                        + "[-maxtime] ");
      System.err.println("    -sync [(sec) sync file once/Xsec] <=0 " +
          "means no sync default value = " + 
          GenWriterThread.DEFAULT_SYNC_INTERVAL_SEC);
      System.err.println("    -roll [(sec) roll file once/Xsec] <=0 " +
          "means no roll, default value = " +
          GenWriterThread.DEFAULT_ROLL_INTERVAL_SEC);
      System.err.println("    -maxtime [(sec) max running time] default " +
          "value = " + GenWriterThread.DEFAULT_MAX_TIME_SEC);
      System.err.println();
    } else if (testtype.equals(DatanodeBenThread.TEST_TYPE)) {
      System.err.printf(DEFAULT_USAGE, testtype, "{[-prepare]} {[-maxtime] " +
          "[-filesize] [-dn] [-pread] [-minfile] [-rep]}");
      System.err.println("    -prepare [generate at least X files per " +
          "datanode in each namespace] default value = " + 
          DatanodeBenThread.DEFAULT_MIN_NUMBER_OF_FILES_PER_DATANODE + 
          " Need to run prepare first before running benchmark");
      System