Java Code Examples for org.apache.hadoop.metrics.MetricsUtil

The following examples show how to use org.apache.hadoop.metrics.MetricsUtil. These examples are extracted from open-source projects. You can vote up the examples you like or vote down the ones you don't, and you can go to the original project or source file by following the links above each example. You may also check out the related API usage in the sidebar.
Example 1
Source Project: hadoop   Source File: CompositeContext.java    License: Apache License 2.0 6 votes vote down vote up
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  // Determine how many child contexts to create from the configured arity.
  final int arity;
  try {
    arity = Integer.parseInt(getAttribute(ARITY_LABEL));
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
              ": could not init arity", e);
    return;
  }
  // Instantiate each child context; getContext may return null, in which
  // case that child is simply skipped.
  for (int i = 0; i < arity; ++i) {
    String subName = String.format(SUB_FMT, contextName, i);
    MetricsContext child = MetricsUtil.getContext(subName, contextName);
    if (child != null) {
      subctxt.add(child);
    }
  }
}
 
Example 2
Source Project: big-c   Source File: CompositeContext.java    License: Apache License 2.0 6 votes vote down vote up
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int childCount;
  try {
    // ARITY_LABEL holds the number of sub-contexts as a decimal string.
    childCount = Integer.parseInt(getAttribute(ARITY_LABEL));
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
              ": could not init arity", e);
    return;
  }
  // Build every child context; nulls from getContext are skipped.
  int i = 0;
  while (i < childCount) {
    MetricsContext child = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (child != null) {
      subctxt.add(child);
    }
    i++;
  }
}
 
Example 3
Source Project: RDFS   Source File: ClusterManagerMetrics.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Constructor.  Creates the cluster-manager metrics record and registers
 * per-resource-type counters plus cluster-wide node and session gauges.
 * @param types The available resource types.
 */
public ClusterManagerMetrics(Collection<ResourceType> types) {
  // Metrics context and the record everything below is published under.
  context = MetricsUtil.getContext(CONTEXT_NAME);
  metricsRecord = MetricsUtil.createRecord(context, CONTEXT_NAME);
  // One counter per resource type for each named resource event.
  typeToResourceRequested = createTypeToResourceCountMap(types, "requested");
  typeToResourceGranted = createTypeToResourceCountMap(types, "granted");
  typeToResourceRevoked = createTypeToResourceCountMap(types, "revoked");
  typeToResourceReleased = createTypeToResourceCountMap(types, "released");
  // Per-resource-type count metrics for current scheduler state.
  typeToPendingCount = createTypeToCountMap(types, "pending");
  typeToRunningCount = createTypeToCountMap(types, "running");
  typeToTotalSlots = createTypeToCountMap(types, "total");
  typeToFreeSlots = createTypeToCountMap(types, "free");
  typeToSchedulerRunTime = createTypeToCountMap(types, "scheduler_runtime");
  sessionStatusToMetrics = createSessionStatusToMetricsMap();
  // Cluster-wide node gauges and session counters, all on the shared registry.
  aliveNodes = new MetricsIntValue("alive_nodes", registry);
  deadNodes = new MetricsIntValue("dead_nodes", registry);
  blacklistedNodes = new MetricsIntValue("blacklisted_nodes", registry);
  numRunningSessions = new MetricsIntValue("num_running_sessions", registry);
  totalSessionCount = new MetricsTimeVaryingInt("total_sessions", registry);
  pendingCallsCount = new MetricsIntValue("num_pending_calls", registry);
  numCJTFailures = new MetricsTimeVaryingInt("num_cjt_failures", registry);
}
 
Example 4
Source Project: RDFS   Source File: FairSchedulerMetricsInst.java    License: Apache License 2.0 6 votes vote down vote up
private void submitPoolMetrics(PoolInfo info) {
  String poolName = info.poolName;
  // Lazily create a dedicated metrics record for this pool on first use.
  MetricsRecord record = poolToMetricsRecord.get(poolName);
  if (record == null) {
    record = MetricsUtil.createRecord(context, "pool-" + poolName);
    FairScheduler.LOG.info("Create metrics record for pool:" + poolName);
    poolToMetricsRecord.put(poolName, record);
  }
  // Configured minimum/maximum slot shares for the pool.
  record.setMetric("min_map", info.minMaps);
  record.setMetric("min_reduce", info.minReduces);
  record.setMetric("max_map", info.maxMaps);
  record.setMetric("max_reduce", info.maxReduces);
  // Current running/runnable task counts.
  record.setMetric("running_map", info.runningMaps);
  record.setMetric("running_reduce", info.runningReduces);
  record.setMetric("runnable_map", info.runnableMaps);
  record.setMetric("runnable_reduce", info.runnableReduces);
  record.setMetric("inited_tasks", info.initedTasks);
  record.setMetric("max_inited_tasks", info.maxInitedTasks);
  // Average first-task wait times; guarded so no division by zero occurs
  // when there are no running jobs.
  int jobs = info.runningJobs;
  record.setMetric("avg_first_map_wait_ms",
      (jobs == 0) ? 0 : info.totalFirstMapWaitTime / jobs);
  record.setMetric("avg_first_reduce_wait_ms",
      (jobs == 0) ? 0 : info.totalFirstReduceWaitTime / jobs);
}
 
Example 5
Source Project: RDFS   Source File: CompositeContext.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Initializes this composite context: reads the configured arity (number
 * of child contexts) and instantiates each child via MetricsUtil.
 *
 * @param contextName name of this composite context
 * @param factory     factory passed through to the superclass init
 */
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    // Integer.parseInt avoids the needless Integer autoboxing that
    // Integer.valueOf incurs before unboxing into an int, and matches the
    // upstream Hadoop implementation of this same method.
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
              ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    // getContext may return null; such children are simply skipped.
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
 
Example 6
Source Project: RDFS   Source File: HighTideNodeMetrics.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Sets up metrics for the HighTideNode: JVM metrics, the activity MBean,
 * and a "hightidenode" record in the "dfs" metrics context.
 */
public HighTideNodeMetrics(Configuration conf, HighTideNode hightideNode) {
  // NOTE(review): the hightideNode parameter is unused in this constructor.
  String sessionId = conf.get("session.id");
  // Initiate Java VM metrics
  JvmMetrics.init("HighTideNode", sessionId);


  // Now the Mbean for the name node - this also registers the MBean
  hightidenodeActivityMBean = new HighTideNodeActivityMBean(registry);

  // Create a record for HighTideNode metrics
  MetricsContext metricsContext = MetricsUtil.getContext("dfs");
  metricsRecord = MetricsUtil.createRecord(metricsContext, "hightidenode");
  metricsRecord.setTag("sessionId", sessionId);
  // Receive periodic doUpdates() callbacks from the metrics context.
  metricsContext.registerUpdater(this);
  LOG.info("Initializing HighTideNodeMetrics using context object:" +
            metricsContext.getClass().getName());
}
 
Example 7
Source Project: incubator-tez   Source File: ShuffleClientMetrics.java    License: Apache License 2.0 6 votes vote down vote up
ShuffleClientMetrics(String dagName, String vertexName, int taskIndex, Configuration conf, 
    String user) {
  // Number of parallel shuffle fetchers, from config (with the Tez default).
  this.numCopiers = conf.getInt(
      TezJobConfig.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES,
      TezJobConfig.TEZ_RUNTIME_SHUFFLE_PARALLEL_COPIES_DEFAULT);

  // Publish a "shuffleInput" record in the Tez context, tagged so records
  // from different users/DAGs/tasks can be told apart downstream.
  MetricsContext ctx = MetricsUtil.getContext(Constants.TEZ);
  this.shuffleMetrics = MetricsUtil.createRecord(ctx, "shuffleInput");
  this.shuffleMetrics.setTag("user", user);
  this.shuffleMetrics.setTag("dagName", dagName);
  this.shuffleMetrics.setTag("taskId",
      TezRuntimeUtils.getTaskIdentifier(vertexName, taskIndex));
  this.shuffleMetrics.setTag("sessionId", conf.get(
      TezRuntimeFrameworkConfigs.TEZ_RUNTIME_METRICS_SESSION_ID,
      TezRuntimeFrameworkConfigs.TEZ_RUNTIME_METRICS_SESSION_ID_DEFAULT));
  ctx.registerUpdater(this);
}
 
Example 8
Source Project: hadoop-gpu   Source File: CompositeContext.java    License: Apache License 2.0 6 votes vote down vote up
/**
 * Initializes this composite context: reads the configured arity (number
 * of child contexts) and instantiates each child via MetricsUtil.
 *
 * @param contextName name of this composite context
 * @param factory     factory passed through to the superclass init
 */
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    // Integer.parseInt avoids the needless Integer autoboxing that
    // Integer.valueOf incurs before unboxing into an int, and matches the
    // upstream Hadoop implementation of this same method.
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
              ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    // getContext may return null; such children are simply skipped.
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
 
Example 9
Source Project: hadoop   Source File: LocalJobRunnerMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up metrics for the local job runner: initializes JVM metrics and
 * creates a map-reduce metrics record in the "mapred" context.
 * @param conf job configuration supplying the session id
 */
public LocalJobRunnerMetrics(JobConf conf) {
  String sessionId = conf.getSessionId();
  // Initiate JVM Metrics
  JvmMetrics.init("JobTracker", sessionId);
  // Create a record for map-reduce metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  // record name is jobtracker for compatibility 
  metricsRecord = MetricsUtil.createRecord(context, "jobtracker");
  metricsRecord.setTag("sessionId", sessionId);
  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
}
 
Example 10
Source Project: hadoop   Source File: ShuffleClientMetrics.java    License: Apache License 2.0 5 votes vote down vote up
ShuffleClientMetrics(TaskAttemptID reduceId, JobConf jobConf) {
  // Number of parallel shuffle fetchers (defaults to 5).
  this.numCopiers = jobConf.getInt(MRJobConfig.SHUFFLE_PARALLEL_COPIES, 5);

  // Publish a "shuffleInput" record, tagged so shuffle metrics can be
  // attributed to this specific reduce attempt.
  MetricsContext ctx = MetricsUtil.getContext("mapred");
  this.shuffleMetrics = MetricsUtil.createRecord(ctx, "shuffleInput");
  shuffleMetrics.setTag("user", jobConf.getUser());
  shuffleMetrics.setTag("jobName", jobConf.getJobName());
  shuffleMetrics.setTag("jobId", reduceId.getJobID().toString());
  shuffleMetrics.setTag("taskId", reduceId.toString());
  shuffleMetrics.setTag("sessionId", jobConf.getSessionId());
  ctx.registerUpdater(this);
}
 
Example 11
Source Project: hadoop   Source File: JvmMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a new instance of JvmMetrics, publishing JVM statistics under
 * the given record name in the "jvm" metrics context.
 */
private JvmMetrics(String processName, String sessionId,
  String recordName) {
    // Tag the record so JVM stats from different daemons and sessions can
    // be distinguished downstream.
    MetricsContext jvmContext = MetricsUtil.getContext("jvm");
    metrics = MetricsUtil.createRecord(jvmContext, recordName);
    metrics.setTag("processName", processName);
    metrics.setTag("sessionId", sessionId);
    jvmContext.registerUpdater(this);
}
 
Example 12
Source Project: big-c   Source File: LocalJobRunnerMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up metrics for the local job runner: initializes JVM metrics and
 * creates a map-reduce metrics record in the "mapred" context.
 * @param conf job configuration supplying the session id
 */
public LocalJobRunnerMetrics(JobConf conf) {
  String sessionId = conf.getSessionId();
  // Initiate JVM Metrics
  JvmMetrics.init("JobTracker", sessionId);
  // Create a record for map-reduce metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  // record name is jobtracker for compatibility 
  metricsRecord = MetricsUtil.createRecord(context, "jobtracker");
  metricsRecord.setTag("sessionId", sessionId);
  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
}
 
Example 13
Source Project: big-c   Source File: ShuffleClientMetrics.java    License: Apache License 2.0 5 votes vote down vote up
ShuffleClientMetrics(TaskAttemptID reduceId, JobConf jobConf) {
  // Number of parallel shuffle fetchers (defaults to 5).
  this.numCopiers = jobConf.getInt(MRJobConfig.SHUFFLE_PARALLEL_COPIES, 5);

  // Publish a "shuffleInput" record, tagged so shuffle metrics can be
  // attributed to this specific reduce attempt.
  MetricsContext ctx = MetricsUtil.getContext("mapred");
  this.shuffleMetrics = MetricsUtil.createRecord(ctx, "shuffleInput");
  shuffleMetrics.setTag("user", jobConf.getUser());
  shuffleMetrics.setTag("jobName", jobConf.getJobName());
  shuffleMetrics.setTag("jobId", reduceId.getJobID().toString());
  shuffleMetrics.setTag("taskId", reduceId.toString());
  shuffleMetrics.setTag("sessionId", jobConf.getSessionId());
  ctx.registerUpdater(this);
}
 
Example 14
Source Project: big-c   Source File: JvmMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a new instance of JvmMetrics, publishing JVM statistics under
 * the given record name in the "jvm" metrics context.
 */
private JvmMetrics(String processName, String sessionId,
  String recordName) {
    // Tag the record so JVM stats from different daemons and sessions can
    // be distinguished downstream.
    MetricsContext jvmContext = MetricsUtil.getContext("jvm");
    metrics = MetricsUtil.createRecord(jvmContext, recordName);
    metrics.setTag("processName", processName);
    metrics.setTag("sessionId", sessionId);
    jvmContext.registerUpdater(this);
}
 
Example 15
Source Project: RDFS   Source File: JobTrackerMetricsInst.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up job tracker metrics: JVM metrics plus a "jobtracker" record in
 * the "mapred" context, tagged with the session id.
 */
public JobTrackerMetricsInst(JobTracker tracker, JobConf conf) {
  super(tracker, conf);
  String sessionId = conf.getSessionId();
  // Initiate JVM Metrics
  JvmMetrics.init("JobTracker", sessionId);
  // Create a record for map-reduce metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(context, "jobtracker");
  metricsRecord.setTag("sessionId", sessionId);
  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
}
 
Example 16
Source Project: RDFS   Source File: TaskTrackerMetricsInst.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up task tracker metrics: JVM metrics plus a "tasktracker" record in
 * the "mapred" context, tagged with the session id.
 */
public TaskTrackerMetricsInst(TaskTracker t) {
  super(t);
  // NOTE(review): 'tt' is presumably set from 't' by super(t) — confirm in
  // the superclass.
  JobConf conf = tt.getJobConf();
  String sessionId = conf.getSessionId();
  // Initiate Java VM Metrics
  JvmMetrics.init("TaskTracker", sessionId);
  // Create a record for Task Tracker metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(context, "tasktracker"); //guaranteed never null
  metricsRecord.setTag("sessionId", sessionId);
  context.registerUpdater(this);
}
 
Example 17
Source Project: RDFS   Source File: TaskErrorCollector.java    License: Apache License 2.0 5 votes vote down vote up
public TaskErrorCollector(Configuration conf) {
  // Sliding-window bookkeeping for per-window error counts.
  errorCountsQueue = new LinkedList<Map<TaskError, Integer>>();
  startTimeQueue = new LinkedList<Long>();
  errorCountsMetrics = new HashMap<TaskError, MetricsTimeVaryingLong>();

  // Publish a "taskerror" record under the "mapred" context.
  MetricsContext metricsContext = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(metricsContext, "taskerror");
  registry = new MetricsRegistry();

  windowLength = conf.getInt(WINDOW_LENGTH_KEY, WINDOW_LENGTH);
  numWindows = conf.getInt(NUM_WINDOWS_KEY, NUM_WINDOWS);

  metricsContext.registerUpdater(this);

  // Locate the error-definition file: explicit config first, then the
  // classpath as a fallback.
  String configFilePath = conf.get(CONFIG_FILE_KEY);
  if (configFilePath == null) {
    URL resource = TaskErrorCollector.class.getClassLoader().getResource(ERROR_XML);
    if (resource != null) {
      configFilePath = resource.getPath();
    }
  }
  if (configFilePath != null) {
    knownErrors = parseConfigFile(configFilePath);
  } else {
    LOG.warn("No " + CONFIG_FILE_KEY + " given in conf. " +
         TaskErrorCollector.class.getSimpleName() +
         " will see every error as UNKNOWN_ERROR.");
    knownErrors = Collections.emptyMap();
  }
  createMetrics();
  sinceStartErrorCounts = createErrorCountsMap();
}
 
Example 18
Source Project: RDFS   Source File: ReduceTask.java    License: Apache License 2.0 5 votes vote down vote up
ShuffleClientMetrics(JobConf conf) {
  // Publish a "shuffleInput" record, tagged with identifying information
  // for this reduce task's shuffle phase.
  MetricsContext ctx = MetricsUtil.getContext("mapred");
  this.shuffleMetrics = MetricsUtil.createRecord(ctx, "shuffleInput");
  shuffleMetrics.setTag("user", conf.getUser());
  shuffleMetrics.setTag("jobName", conf.getJobName());
  shuffleMetrics.setTag("jobId", ReduceTask.this.getJobID().toString());
  shuffleMetrics.setTag("taskId", getTaskID().toString());
  shuffleMetrics.setTag("sessionId", conf.getSessionId());
  ctx.registerUpdater(this);
}
 
Example 19
Source Project: RDFS   Source File: TaskTracker.java    License: Apache License 2.0 5 votes vote down vote up
ShuffleServerMetrics(JobConf conf) {
  // Publish a "shuffleOutput" record in the "mapred" context, tagged with
  // this task tracker's session id.
  MetricsContext ctx = MetricsUtil.getContext("mapred");
  shuffleMetricsRecord = MetricsUtil.createRecord(ctx, "shuffleOutput");
  shuffleMetricsRecord.setTag("sessionId", conf.getSessionId());
  ctx.registerUpdater(this);
}
 
Example 20
Source Project: RDFS   Source File: RaidNodeMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Private constructor.  Creates the "raidnode" metrics record and
 * initializes the placement, source and parity metric groups.
 */
private RaidNodeMetrics() {
  // Create a record for raid metrics
  context = MetricsUtil.getContext("raidnode");
  metricsRecord = MetricsUtil.createRecord(context, "raidnode");
  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
  // Initialize the individual metric groups tracked by the raid node.
  initPlacementMetrics();
  initSourceMetrics();
  initParityMetrics();
  LOG.info("RaidNode Metrics is initialized");
}
 
Example 21
Source Project: RDFS   Source File: ClusterManagerMetricsVerifier.java    License: Apache License 2.0 5 votes vote down vote up
private void verifyMetrics(String name, int expectValue) throws Exception {
  MetricsContext context =
      MetricsUtil.getContext(ClusterManagerMetrics.CONTEXT_NAME);
  // Force a metrics push so the context reflects current values.
  cm.metrics.doUpdates(context);
  // Take the first record published under the cluster-manager context name.
  OutputRecord record = context.getAllRecords()
      .get(ClusterManagerMetrics.CONTEXT_NAME)
      .iterator()
      .next();
  Assert.assertEquals(expectValue, record.getMetric(name).intValue());
}
 
Example 22
Source Project: RDFS   Source File: FairSchedulerMetricsInst.java    License: Apache License 2.0 5 votes vote down vote up
public FairSchedulerMetricsInst(FairScheduler scheduler, Configuration conf) {
  // Create a record for map-reduce metrics
  metricsRecord = MetricsUtil.createRecord(context, "fairscheduler");
  poolToMetricsRecord = new HashMap<String, MetricsRecord>();
  context.registerUpdater(this);

  updatePeriod = conf.getLong("mapred.fairscheduler.metric.update.period",
                              5 * 1000);  // default period is 5 seconds.
  jobInitializer = scheduler.getJobInitializer();
}
 
Example 23
Source Project: RDFS   Source File: JvmMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Creates a new instance of JvmMetrics, publishing JVM statistics under
 * the given record name in the "jvm" metrics context.
 */
private JvmMetrics(String processName, String sessionId,
  String recordName) {
    // Tag the record so JVM stats from different daemons and sessions can
    // be distinguished downstream.
    MetricsContext jvmContext = MetricsUtil.getContext("jvm");
    metrics = MetricsUtil.createRecord(jvmContext, recordName);
    metrics.setTag("processName", processName);
    metrics.setTag("sessionId", sessionId);
    jvmContext.registerUpdater(this);
}
 
Example 24
Source Project: RDFS   Source File: RpcMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up RPC metrics: a "metrics" record in the "rpc" context tagged with
 * the server port, plus the RPC activity MBean.
 * @param hostName host name used for the MBean and logging
 * @param port RPC server port, recorded as a tag
 * @param server the RPC server being instrumented
 */
public RpcMetrics(String hostName, String port, Server server) {
  myServer = server;
  context = MetricsUtil.getContext("rpc");
  metricsRecord = MetricsUtil.createRecord(context, "metrics");

  metricsRecord.setTag("port", port);

  LOG.info("Initializing RPC Metrics with hostName=" 
      + hostName + ", port=" + port);

  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
  
  // Need to clean up the interface to RpcMgt - don't need both metrics and server params
  rpcMBean = new RpcActivityMBean(registry, hostName, port);
}
 
Example 25
Source Project: RDFS   Source File: DFSClientMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up a "DFSClient" metrics record in the "hdfsclient" context,
 * updated via registered doUpdates() callbacks.
 */
public DFSClientMetrics() {
	// Create a record for DFSClient metrics (the previous comment said
	// "FSNamesystem", which was a copy-paste error).
	MetricsContext metricsContext = MetricsUtil.getContext("hdfsclient");
	metricsRecord = MetricsUtil.createRecord(metricsContext, "DFSClient");
	metricsContext.registerUpdater(this);

}
 
Example 26
Source Project: RDFS   Source File: LookasideMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up a "LookasideFileSystem" metrics record in the "lookasideCache"
 * context, updated via registered doUpdates() callbacks.
 */
public LookasideMetrics() {
  // Create a record for LookasideCache metrics
  MetricsContext metricsContext = MetricsUtil.getContext("lookasideCache");
  metricsRecord = MetricsUtil.createRecord(metricsContext,
                                           "LookasideFileSystem");
  metricsContext.registerUpdater(this);

}
 
Example 27
Source Project: RDFS   Source File: DataNodeMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up DataNode metrics: JVM metrics, the DataNode activity MBean, and
 * a "datanode" record in the "dfs" context tagged with the session id.
 */
public DataNodeMetrics(Configuration conf, String storageId) {
  String sessionId = conf.get("session.id"); 
  // Initiate reporting of Java VM metrics
  JvmMetrics.init("DataNode", sessionId);
  

  // Now the MBean for the data node
  datanodeActivityMBean = new DataNodeActivityMBean(registry, storageId);
  
  // Create record for DataNode metrics
  MetricsContext context = MetricsUtil.getContext("dfs");
  metricsRecord = MetricsUtil.createRecord(context, "datanode");
  metricsRecord.setTag("sessionId", sessionId);
  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
}
 
Example 28
Source Project: hbase-indexer   Source File: SepMetrics.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up SEP metrics under the given record name: a processing-rate
 * metric and a last-processed-timestamp value, published through the
 * "repository" metrics context and exposed via an MXBean.
 * @param recordName name of the metrics record to publish under
 */
public SepMetrics(String recordName) {
    this.recordName = recordName;
    metricsRegistry = new MetricsRegistry();
    // Rate of SEP events processed.
    sepProcessingRate = new MetricsTimeVaryingRate("sepProcessed", metricsRegistry);
    // Timestamp of the most recently processed input.
    lastTimestampInputProcessed = new MetricsLongValue("lastSepTimestamp", metricsRegistry);

    context = MetricsUtil.getContext("repository");
    metricsRecord = MetricsUtil.createRecord(context, recordName);
    // Periodic doUpdates() callbacks push the registry values out.
    context.registerUpdater(this);
    // Expose the same registry over JMX.
    mbean = new SepMetricsMXBean(this.metricsRegistry);
}
 
Example 29
Source Project: hadoop-gpu   Source File: JobTrackerMetricsInst.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up job tracker metrics: JVM metrics plus a "jobtracker" record in
 * the "mapred" context, tagged with the session id.
 */
public JobTrackerMetricsInst(JobTracker tracker, JobConf conf) {
  super(tracker, conf);
  String sessionId = conf.getSessionId();
  // Initiate JVM Metrics
  JvmMetrics.init("JobTracker", sessionId);
  // Create a record for map-reduce metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(context, "jobtracker");
  metricsRecord.setTag("sessionId", sessionId);
  // Receive periodic doUpdates() callbacks from the metrics context.
  context.registerUpdater(this);
}
 
Example 30
Source Project: hadoop-gpu   Source File: TaskTrackerMetricsInst.java    License: Apache License 2.0 5 votes vote down vote up
/**
 * Sets up task tracker metrics: JVM metrics plus a "tasktracker" record in
 * the "mapred" context, tagged with the session id.
 */
public TaskTrackerMetricsInst(TaskTracker t) {
  super(t);
  // NOTE(review): 'tt' is presumably set from 't' by super(t) — confirm in
  // the superclass.
  JobConf conf = tt.getJobConf();
  String sessionId = conf.getSessionId();
  // Initiate Java VM Metrics
  JvmMetrics.init("TaskTracker", sessionId);
  // Create a record for Task Tracker metrics
  MetricsContext context = MetricsUtil.getContext("mapred");
  metricsRecord = MetricsUtil.createRecord(context, "tasktracker"); //guaranteed never null
  metricsRecord.setTag("sessionId", sessionId);
  context.registerUpdater(this);
}
}