org.apache.spark.ui.UIUtils Scala Examples

The following examples show how to use org.apache.spark.ui.UIUtils. Each example is taken from an open-source project and notes its source file, originating project, and license.
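Before the examples, here is a minimal, self-contained sketch of the pattern most of them follow: a WebUIPage whose render method builds a scala.xml.Node sequence and hands it to a UIUtils page helper. The package, class name, and page content below are hypothetical illustrations; only the UIUtils calls (prependBaseUri, formatDate, headerSparkPage) and the WebUIPage/SparkUITab types come from the examples that follow.

package org.apache.spark.ui.sample // hypothetical package; UIUtils and WebUIPage are private[spark]

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUITab, UIUtils, WebUIPage}

// Hypothetical page used only to illustrate the common UIUtils calls.
private[ui] class SamplePage(parent: SparkUITab) extends WebUIPage("sample") {

  def render(request: HttpServletRequest): Seq[Node] = {
    // prependBaseUri resolves static assets relative to the UI root (e.g. behind a proxy).
    val content =
      <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
      <div class="row-fluid">
        <p>Rendered at {UIUtils.formatDate(System.currentTimeMillis())}</p>
      </div>
    // headerSparkPage wraps the content with the standard Spark UI header and tab navigation.
    UIUtils.headerSparkPage("Sample Page", content, parent)
  }
}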
Example 1
Source File: HistoryPage.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ApplicationInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    // stripXSS is called first to remove suspicious characters used in XSS attacks
    val requestedIncomplete =
      Option(UIUtils.stripXSS(request.getParameter("showIncomplete"))).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList()
      .count(isApplicationCompleted(_) != requestedIncomplete)
    val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess()
    val lastUpdatedTime = parent.getLastUpdatedTime()
    val providerConfig = parent.getProviderConfig()
    val content =
      <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script> ++
      <script src={UIUtils.prependBaseUri("/static/utils.js")}></script>
      <div>
          <div class="container-fluid">
            <ul class="unstyled">
              {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
            </ul>
            {
            if (eventLogsUnderProcessCount > 0) {
              <p>There are {eventLogsUnderProcessCount} event log(s) currently being
                processed which may result in additional applications getting listed on this page.
                Refresh the page to view updates. </p>
            }
            }

            {
            if (lastUpdatedTime > 0) {
              <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p>
            }
            }

            {
            <p>Client local time zone: <span id="time-zone"></span></p>
            }

            {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
                <div id="history-summary" class="row-fluid"></div> ++
                <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
                <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else if (eventLogsUnderProcessCount > 0) {
              <h4>No completed applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
            }

            <a href={makePageLink(!requestedIncomplete)}>
              {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
              }
            </a>
          </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }

  private def isApplicationCompleted(appInfo: ApplicationInfo): Boolean = {
    appInfo.attempts.nonEmpty && appInfo.attempts.head.completed
  }
} 
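The showIncomplete handling above (Option around UIUtils.stripXSS, a default, then toBoolean) is the recurring way these Spark 2.3.x pages read optional boolean query parameters. Below is a minimal sketch of that pattern in isolation; the helper object and method name are hypothetical, and only the stripXSS usage comes from the example above.

package org.apache.spark.deploy.history // placed here only because UIUtils is private[spark]

import javax.servlet.http.HttpServletRequest

import org.apache.spark.ui.UIUtils

// Hypothetical helper illustrating the sanitize-then-parse pattern used by HistoryPage above.
private[history] object RequestParams {

  def booleanParam(request: HttpServletRequest, name: String, default: Boolean): Boolean =
    // stripXSS removes characters commonly used in XSS payloads; as in HistoryPage,
    // a missing parameter stays null, so Option() falls back to the default.
    Option(UIUtils.stripXSS(request.getParameter(name)))
      .map(_.toBoolean)
      .getOrElse(default)
}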
Example 2
Source File: HistoryNotFoundPage.scala    From iolap   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  
  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your
          event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
} 
Example 3
Source File: ExecutorThreadDumpPage.scala    From iolap   with Apache License 2.0
package org.apache.spark.ui.exec

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map {
      executorId =>
        // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when
        // running in yarn-cluster mode. `request.getParameter("executorId")` will return
        // "%253Cdriver%253E". Therefore we need to decode it until we get the real id.
        var id = executorId
        var decodedId = URLDecoder.decode(id, "UTF-8")
        while (id != decodedId) {
          id = decodedId
          decodedId = URLDecoder.decode(id, "UTF-8")
        }
        id
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.map { thread =>
        <div class="accordion-group">
          <div class="accordion-heading" onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {thread.threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
Example 4
Source File: EnvironmentPage.scala    From iolap   with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
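The four tables above are all produced by UIUtils.listingTable, which takes a header sequence, a row-rendering function, and the data to render. The following standalone sketch uses hypothetical property data; only the listingTable call shape and the fixedWidth flag come from the example above.

package org.apache.spark.ui.sample // hypothetical package; UIUtils is private[spark]

import scala.xml.Node

import org.apache.spark.ui.UIUtils

// Hypothetical data and row renderer, mirroring the Environment page's name/value tables.
object ListingTableSketch {

  private val props: Seq[(String, String)] = Seq(
    "spark.app.name" -> "demo",
    "spark.master" -> "local[*]")

  private def propertyHeader: Seq[String] = Seq("Name", "Value")
  private def propertyRow(kv: (String, String)): Seq[Node] =
    <tr><td>{kv._1}</td><td>{kv._2}</td></tr>

  // Returns a complete <table> element ready to embed in a page's content.
  def table: Seq[Node] =
    UIUtils.listingTable(propertyHeader, propertyRow, props, fixedWidth = true)
}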
Example 5
Source File: StoragePage.scala    From iolap   with Apache License 2.0
package org.apache.spark.ui.storage

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.storage.RDDInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils


private[ui] class StoragePage(parent: StorageTab) extends WebUIPage("") {

  private def rddRow(rdd: RDDInfo): Seq[Node] = {
    // scalastyle:off
    <tr>
      <td>
        <a href={"%s/storage/rdd?id=%s".format(UIUtils.prependBaseUri(parent.basePath), rdd.id)}>
          {rdd.name}
        </a>
      </td>
      <td>{rdd.storageLevel.description}
      </td>
      <td>{rdd.numCachedPartitions}</td>
      <td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)}</td>
      <td sorttable_customkey={rdd.memSize.toString}>{Utils.bytesToString(rdd.memSize)}</td>
      <td sorttable_customkey={rdd.externalBlockStoreSize.toString}>{Utils.bytesToString(rdd.externalBlockStoreSize)}</td>
      <td sorttable_customkey={rdd.diskSize.toString} >{Utils.bytesToString(rdd.diskSize)}</td>
    </tr>
    // scalastyle:on
  }
} 
Example 6
Source File: PoolTable.scala    From iolap   with Apache License 2.0
package org.apache.spark.ui.jobs

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.progressListener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), p.name)
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
Example 7
Source File: PoolPage.scala    From iolap   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = request.getParameter("poolname")
      require(poolName != null && poolName.nonEmpty, "Missing poolname parameter")

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.progressListener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).get).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
} 
Example 8
Source File: MesosClusterPage.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.deploy.mesos.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.mesos.Protos.TaskStatus
import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[mesos] class MesosClusterPage(parent: MesosClusterUI) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val state = parent.scheduler.getSchedulerState()
    val queuedHeaders = Seq("Driver ID", "Submit Date", "Main Class", "Driver Resources")
    val driverHeaders = queuedHeaders ++
      Seq("Start Date", "Mesos Slave ID", "State")
    val retryHeaders = Seq("Driver ID", "Submit Date", "Description") ++
      Seq("Last Failed Status", "Next Retry Time", "Attempt Count")
    val queuedTable = UIUtils.listingTable(queuedHeaders, queuedRow, state.queuedDrivers)
    val launchedTable = UIUtils.listingTable(driverHeaders, driverRow, state.launchedDrivers)
    val finishedTable = UIUtils.listingTable(driverHeaders, driverRow, state.finishedDrivers)
    val retryTable = UIUtils.listingTable(retryHeaders, retryRow, state.pendingRetryDrivers)
    val content =
      <p>Mesos Framework ID: {state.frameworkId}</p>
      <div class="row-fluid">
        <div class="span12">
          <h4>Queued Drivers:</h4>
          {queuedTable}
          <h4>Launched Drivers:</h4>
          {launchedTable}
          <h4>Finished Drivers:</h4>
          {finishedTable}
          <h4>Supervise drivers waiting for retry:</h4>
          {retryTable}
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Spark Drivers for Mesos cluster")
  }

  private def queuedRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>cpus: {submission.cores}, mem: {submission.mem}</td>
    </tr>
  }

  private def driverRow(state: MesosClusterSubmissionState): Seq[Node] = {
    val id = state.driverDescription.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{state.driverDescription.submissionDate}</td>
      <td>{state.driverDescription.command.mainClass}</td>
      <td>cpus: {state.driverDescription.cores}, mem: {state.driverDescription.mem}</td>
      <td>{state.startDate}</td>
      <td>{state.slaveId.getValue}</td>
      <td>{stateString(state.mesosTaskStatus)}</td>
    </tr>
  }

  private def retryRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>{submission.retryState.get.lastFailureStatus}</td>
      <td>{submission.retryState.get.nextRetry}</td>
      <td>{submission.retryState.get.retries}</td>
    </tr>
  }

  private def stateString(status: Option[TaskStatus]): String = {
    if (status.isEmpty) {
      return ""
    }
    val sb = new StringBuilder
    val s = status.get
    sb.append(s"State: ${s.getState}")
    if (status.get.hasMessage) {
      sb.append(s", Message: ${s.getMessage}")
    }
    if (status.get.hasHealthy) {
      sb.append(s", Healthy: ${s.getHealthy}")
    }
    if (status.get.hasSource) {
      sb.append(s", Source: ${s.getSource}")
    }
    if (status.get.hasReason) {
      sb.append(s", Reason: ${s.getReason}")
    }
    if (status.get.hasTimestamp) {
      sb.append(s", Time: ${s.getTimestamp}")
    }
    sb.toString()
  }
} 
Example 9
Source File: HistoryNotFoundPage.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  
  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your
          event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
} 
Example 10
Source File: ApplicationPage.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.deploy.ExecutorState
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
import org.apache.spark.deploy.master.ExecutorDesc
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils

private[ui] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app") {

  private val master = parent.masterEndpointRef

  
  def render(request: HttpServletRequest): Seq[Node] = {
    val appId = request.getParameter("appId")
    val state = master.askWithRetry[MasterStateResponse](RequestMasterState)
    val app = state.activeApps.find(_.id == appId).getOrElse({
      state.completedApps.find(_.id == appId).getOrElse(null)
    })
    if (app == null) {
      val msg = <div class="row-fluid">No running application with ID {appId}</div>
      return UIUtils.basicSparkPage(msg, "Not Found")
    }

    val executorHeaders = Seq("ExecutorID", "Worker", "Cores", "Memory", "State", "Logs")
    val allExecutors = (app.executors.values ++ app.removedExecutors).toSet.toSeq
    // This includes executors that are either still running or have exited cleanly
    val executors = allExecutors.filter { exec =>
      !ExecutorState.isFinished(exec.state) || exec.state == ExecutorState.EXITED
    }
    val removedExecutors = allExecutors.diff(executors)
    val executorsTable = UIUtils.listingTable(executorHeaders, executorRow, executors)
    val removedExecutorsTable = UIUtils.listingTable(executorHeaders, executorRow, removedExecutors)

    val content =
      <div class="row-fluid">
        <div class="span12">
          <ul class="unstyled">
            <li><strong>ID:</strong> {app.id}</li>
            <li><strong>Name:</strong> {app.desc.name}</li>
            <li><strong>User:</strong> {app.desc.user}</li>
            <li><strong>Cores:</strong>
            {
              if (app.desc.maxCores.isEmpty) {
                "Unlimited (%s granted)".format(app.coresGranted)
              } else {
                "%s (%s granted, %s left)".format(
                  app.desc.maxCores.get, app.coresGranted, app.coresLeft)
              }
            }
            </li>
            <li>
              <strong>Executor Memory:</strong>
              {Utils.megabytesToString(app.desc.memoryPerExecutorMB)}
            </li>
            <li><strong>Submit Date:</strong> {app.submitDate}</li>
            <li><strong>State:</strong> {app.state}</li>
            <li><strong><a href={app.desc.appUiUrl}>Application Detail UI</a></strong></li>
          </ul>
        </div>
      </div>

      <div class="row-fluid"> <!-- Executors -->
        <div class="span12">
          <h4> Executor Summary </h4>
          {executorsTable}
          {
            if (removedExecutors.nonEmpty) {
              <h4> Removed Executors </h4> ++
              removedExecutorsTable
            }
          }
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Application: " + app.desc.name)
  }

  private def executorRow(executor: ExecutorDesc): Seq[Node] = {
    <tr>
      <td>{executor.id}</td>
      <td>
        <a href={executor.worker.webUiAddress}>{executor.worker.id}</a>
      </td>
      <td>{executor.cores}</td>
      <td>{executor.memory}</td>
      <td>{executor.state}</td>
      <td>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stdout"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stdout</a>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stderr"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stderr</a>
      </td>
    </tr>
  }
} 
Example 11
Source File: ExecutorThreadDumpPage.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.ui.exec

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map {
      executorId =>
        // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when
        // running in yarn-cluster mode. `request.getParameter("executorId")` will return
        // "%253Cdriver%253E". Therefore we need to decode it until we get the real id.
        var id = executorId
        var decodedId = URLDecoder.decode(id, "UTF-8")
        while (id != decodedId) {
          id = decodedId
          decodedId = URLDecoder.decode(id, "UTF-8")
        }
        id
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) => {
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
        }
      }.map { thread =>
        val threadName = thread.threadName
        val className = "accordion-heading " + {
          if (threadName.contains("Executor task launch")) {
            "executor-thread"
          } else {
            "non-executor-thread"
          }
        }
        <div class="accordion-group">
          <div class={className} onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
Example 12
Source File: EnvironmentPage.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
Example 13
Source File: PoolTable.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.ui.jobs

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.progressListener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), p.name)
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
Example 14
Source File: PoolPage.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = request.getParameter("poolname")
      require(poolName != null && poolName.nonEmpty, "Missing poolname parameter")

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.progressListener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).get).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
} 
Example 15
Source File: MesosClusterPage.scala    From iolap   with Apache License 2.0
package org.apache.spark.deploy.mesos.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.mesos.Protos.TaskStatus
import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[mesos] class MesosClusterPage(parent: MesosClusterUI) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val state = parent.scheduler.getSchedulerState()
    val queuedHeaders = Seq("Driver ID", "Submit Date", "Main Class", "Driver Resources")
    val driverHeaders = queuedHeaders ++
      Seq("Start Date", "Mesos Slave ID", "State")
    val retryHeaders = Seq("Driver ID", "Submit Date", "Description") ++
      Seq("Last Failed Status", "Next Retry Time", "Attempt Count")
    val queuedTable = UIUtils.listingTable(queuedHeaders, queuedRow, state.queuedDrivers)
    val launchedTable = UIUtils.listingTable(driverHeaders, driverRow, state.launchedDrivers)
    val finishedTable = UIUtils.listingTable(driverHeaders, driverRow, state.finishedDrivers)
    val retryTable = UIUtils.listingTable(retryHeaders, retryRow, state.pendingRetryDrivers)
    val content =
      <p>Mesos Framework ID: {state.frameworkId}</p>
      <div class="row-fluid">
        <div class="span12">
          <h4>Queued Drivers:</h4>
          {queuedTable}
          <h4>Launched Drivers:</h4>
          {launchedTable}
          <h4>Finished Drivers:</h4>
          {finishedTable}
          <h4>Supervise drivers waiting for retry:</h4>
          {retryTable}
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Spark Drivers for Mesos cluster")
  }

  private def queuedRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>cpus: {submission.cores}, mem: {submission.mem}</td>
    </tr>
  }

  private def driverRow(state: MesosClusterSubmissionState): Seq[Node] = {
    val id = state.driverDescription.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{state.driverDescription.submissionDate}</td>
      <td>{state.driverDescription.command.mainClass}</td>
      <td>cpus: {state.driverDescription.cores}, mem: {state.driverDescription.mem}</td>
      <td>{state.startDate}</td>
      <td>{state.slaveId.getValue}</td>
      <td>{stateString(state.mesosTaskStatus)}</td>
    </tr>
  }

  private def retryRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>{submission.retryState.get.lastFailureStatus}</td>
      <td>{submission.retryState.get.nextRetry}</td>
      <td>{submission.retryState.get.retries}</td>
    </tr>
  }

  private def stateString(status: Option[TaskStatus]): String = {
    if (status.isEmpty) {
      return ""
    }
    val sb = new StringBuilder
    val s = status.get
    sb.append(s"State: ${s.getState}")
    if (status.get.hasMessage) {
      sb.append(s", Message: ${s.getMessage}")
    }
    if (status.get.hasHealthy) {
      sb.append(s", Healthy: ${s.getHealthy}")
    }
    if (status.get.hasSource) {
      sb.append(s", Source: ${s.getSource}")
    }
    if (status.get.hasReason) {
      sb.append(s", Reason: ${s.getReason}")
    }
    if (status.get.hasTimestamp) {
      sb.append(s", Time: ${s.getTimestamp}")
    }
    sb.toString()
  }
} 
Example 16
Source File: ExecutorsTab.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils, WebUIPage}

private[ui] class ExecutorsTab(parent: SparkUI) extends SparkUITab(parent, "executors") {

  init()

  private def init(): Unit = {
    val threadDumpEnabled =
      parent.sc.isDefined && parent.conf.getBoolean("spark.ui.threadDumpsEnabled", true)

    attachPage(new ExecutorsPage(this, threadDumpEnabled))
    if (threadDumpEnabled) {
      attachPage(new ExecutorThreadDumpPage(this, parent.sc))
    }
  }

}

private[ui] class ExecutorsPage(
    parent: SparkUITab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors" class="row-fluid"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
} 
Example 17
Source File: ExecutorThreadDumpPage.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.ui.exec

import java.util.Locale
import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.SparkContext
import org.apache.spark.ui.{SparkUITab, UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(
    parent: SparkUITab,
    sc: Option[SparkContext]) extends WebUIPage("threadDump") {

  // stripXSS is called first to remove suspicious characters used in XSS attacks
  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId =
      Option(UIUtils.stripXSS(request.getParameter("executorId"))).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase(Locale.ROOT) <
              threadTrace2.threadName.toLowerCase(Locale.ROOT)
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        val blockedBy = thread.blockedByThreadId match {
          case Some(_) =>
            <div>
              Blocked by <a href={s"#${thread.blockedByThreadId}_td_id"}>
              Thread {thread.blockedByThreadId} {thread.blockedByLock}</a>
            </div>
          case None => Text("")
        }
        val heldLocks = thread.holdingLocks.mkString(", ")

        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_locking"}>{blockedBy}{heldLocks}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

    <div class="row-fluid">
      <p>Updated at {UIUtils.formatDate(time)}</p>
      {
        // scalastyle:off
        <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
          Expand All
        </a></p>
        <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
          Collapse All
        </a></p>
        <div class="form-inline">
        <div class="bs-example" data-example-id="simple-form-inline">
          <div class="form-group">
            <div class="input-group">
              Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input>
            </div>
          </div>
        </div>
        </div>
        <p></p>
        // scalastyle:on
      }
      <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
        <thead>
          <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Locks</th>
        </thead>
        <tbody>{dumpRows}</tbody>
      </table>
    </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
Example 18
Source File: StagesTab.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import org.apache.spark.scheduler.SchedulingMode
import org.apache.spark.status.AppStatusStore
import org.apache.spark.status.api.v1.StageStatus
import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils}


private[ui] class StagesTab(val parent: SparkUI, val store: AppStatusStore)
  extends SparkUITab(parent, "stages") {

  val sc = parent.sc
  val conf = parent.conf
  val killEnabled = parent.killEnabled

  attachPage(new AllStagesPage(this))
  attachPage(new StagePage(this, store))
  attachPage(new PoolPage(this))

  def isFairScheduler: Boolean = {
    store
      .environmentInfo()
      .sparkProperties
      .contains(("spark.scheduler.mode", SchedulingMode.FAIR.toString))
  }

  def handleKillRequest(request: HttpServletRequest): Unit = {
    if (killEnabled && parent.securityManager.checkModifyPermissions(request.getRemoteUser)) {
      // stripXSS is called first to remove suspicious characters used in XSS attacks
      val stageId = Option(UIUtils.stripXSS(request.getParameter("id"))).map(_.toInt)
      stageId.foreach { id =>
        store.asOption(store.lastStageAttempt(id)).foreach { stage =>
          val status = stage.status
          if (status == StageStatus.ACTIVE || status == StageStatus.PENDING) {
            sc.foreach(_.cancelStage(id, "killed via the Web UI"))
            // Do a quick pause here to give Spark time to kill the stage so it shows up as
            // killed after the refresh. Note that this will block the serving thread so the
            // time should be limited in duration.
            Thread.sleep(100)
          }
        }
      }
    }
  }

} 
Example 19
Source File: PoolTable.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.ui.jobs

import java.net.URLEncoder

import scala.xml.Node

import org.apache.spark.scheduler.Schedulable
import org.apache.spark.status.PoolData
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Map[Schedulable, PoolData], parent: StagesTab) {

  def toNodeSeq: Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {pools.map { case (s, p) => poolRow(s, p) }}
      </tbody>
    </table>
  }

  private def poolRow(s: Schedulable, p: PoolData): Seq[Node] = {
    val activeStages = p.stageIds.size
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), URLEncoder.encode(p.name, "UTF-8"))
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{s.minShare}</td>
      <td>{s.weight}</td>
      <td>{activeStages}</td>
      <td>{s.runningTasks}</td>
      <td>{s.schedulingMode}</td>
    </tr>
  }
} 
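The href construction above shows how these tables build links that still work when the UI is served under a non-root base path: UIUtils.prependBaseUri prefixes the UI root and the tab's base path, and the query value is URL-encoded. Below is a minimal sketch of that construction; the helper object and method name are hypothetical.

package org.apache.spark.ui.jobs // placed here only because UIUtils is private[spark]

import java.net.URLEncoder

import org.apache.spark.ui.UIUtils

// Hypothetical helper mirroring the pool link built in PoolTable.poolRow above.
private[ui] object PoolLinks {

  def poolLink(basePath: String, poolName: String): String =
    "%s/stages/pool?poolname=%s".format(
      UIUtils.prependBaseUri(basePath), URLEncoder.encode(poolName, "UTF-8"))
}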
Example 20
Source File: PoolPage.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.PoolData
import org.apache.spark.status.api.v1._
import org.apache.spark.ui.{UIUtils, WebUIPage}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {

  def render(request: HttpServletRequest): Seq[Node] = {
    // stripXSS is called first to remove suspicious characters used in XSS attacks
    val poolName = Option(UIUtils.stripXSS(request.getParameter("poolname"))).map { poolname =>
      UIUtils.decodeURLParameter(poolname)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing poolname parameter")
    }

    // For now, pool information is only accessible in live UIs
    val pool = parent.sc.flatMap(_.getPoolForName(poolName)).getOrElse {
      throw new IllegalArgumentException(s"Unknown pool: $poolName")
    }

    val uiPool = parent.store.asOption(parent.store.pool(poolName)).getOrElse(
      new PoolData(poolName, Set()))
    val activeStages = uiPool.stageIds.toSeq.map(parent.store.lastStageAttempt(_))
    val activeStagesTable =
      new StageTableBase(parent.store, request, activeStages, "", "activeStage", parent.basePath,
        "stages/pool", parent.isFairScheduler, parent.killEnabled, false)

    val poolTable = new PoolTable(Map(pool -> uiPool), parent)
    var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
    if (activeStages.nonEmpty) {
      content ++= <h4>Active Stages ({activeStages.size})</h4> ++ activeStagesTable.toNodeSeq
    }

    UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
  }
} 
Example 21
Source File: MesosClusterPage.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.deploy.mesos.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.mesos.Protos.TaskStatus
import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos.MesosClusterSubmissionState
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[mesos] class MesosClusterPage(parent: MesosClusterUI) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val state = parent.scheduler.getSchedulerState()
    val queuedHeaders = Seq("Driver ID", "Submit Date", "Main Class", "Driver Resources")
    val driverHeaders = queuedHeaders ++
      Seq("Start Date", "Mesos Slave ID", "State")
    val retryHeaders = Seq("Driver ID", "Submit Date", "Description") ++
      Seq("Last Failed Status", "Next Retry Time", "Attempt Count")
    val queuedTable = UIUtils.listingTable(queuedHeaders, queuedRow, state.queuedDrivers)
    val launchedTable = UIUtils.listingTable(driverHeaders, driverRow, state.launchedDrivers)
    val finishedTable = UIUtils.listingTable(driverHeaders, driverRow, state.finishedDrivers)
    val retryTable = UIUtils.listingTable(retryHeaders, retryRow, state.pendingRetryDrivers)
    val content =
      <p>Mesos Framework ID: {state.frameworkId}</p>
      <div class="row-fluid">
        <div class="span12">
          <h4>Queued Drivers:</h4>
          {queuedTable}
          <h4>Launched Drivers:</h4>
          {launchedTable}
          <h4>Finished Drivers:</h4>
          {finishedTable}
          <h4>Supervise drivers waiting for retry:</h4>
          {retryTable}
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Spark Drivers for Mesos cluster")
  }

  private def queuedRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>cpus: {submission.cores}, mem: {submission.mem}</td>
    </tr>
  }

  private def driverRow(state: MesosClusterSubmissionState): Seq[Node] = {
    val id = state.driverDescription.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{state.driverDescription.submissionDate}</td>
      <td>{state.driverDescription.command.mainClass}</td>
      <td>cpus: {state.driverDescription.cores}, mem: {state.driverDescription.mem}</td>
      <td>{state.startDate}</td>
      <td>{state.slaveId.getValue}</td>
      <td>{stateString(state.mesosTaskStatus)}</td>
    </tr>
  }

  private def retryRow(submission: MesosDriverDescription): Seq[Node] = {
    val id = submission.submissionId
    <tr>
      <td><a href={s"driver?id=$id"}>{id}</a></td>
      <td>{submission.submissionDate}</td>
      <td>{submission.command.mainClass}</td>
      <td>{submission.retryState.get.lastFailureStatus}</td>
      <td>{submission.retryState.get.nextRetry}</td>
      <td>{submission.retryState.get.retries}</td>
    </tr>
  }

  private def stateString(status: Option[TaskStatus]): String = {
    if (status.isEmpty) {
      return ""
    }
    val sb = new StringBuilder
    val s = status.get
    sb.append(s"State: ${s.getState}")
    if (status.get.hasMessage) {
      sb.append(s", Message: ${s.getMessage}")
    }
    if (status.get.hasHealthy) {
      sb.append(s", Healthy: ${s.getHealthy}")
    }
    if (status.get.hasSource) {
      sb.append(s", Source: ${s.getSource}")
    }
    if (status.get.hasReason) {
      sb.append(s", Reason: ${s.getReason}")
    }
    if (status.get.hasTimestamp) {
      sb.append(s", Time: ${s.getTimestamp}")
    }
    sb.toString()
  }
} 
Example 22
Source File: HistoryNotFoundPage.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  
  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your
          event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
} 
Example 23
Source File: ApplicationPage.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.deploy.ExecutorState
import org.apache.spark.deploy.DeployMessages.{MasterStateResponse, RequestMasterState}
import org.apache.spark.deploy.master.ExecutorDesc
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.apache.spark.util.Utils

private[ui] class ApplicationPage(parent: MasterWebUI) extends WebUIPage("app") {

  private val master = parent.masterEndpointRef

  
  def render(request: HttpServletRequest): Seq[Node] = {
    val appId = request.getParameter("appId")
    val state = master.askWithRetry[MasterStateResponse](RequestMasterState)
    val app = state.activeApps.find(_.id == appId).getOrElse({
      state.completedApps.find(_.id == appId).getOrElse(null)
    })
    if (app == null) {
      val msg = <div class="row-fluid">No running application with ID {appId}</div>
      return UIUtils.basicSparkPage(msg, "Not Found")
    }

    val executorHeaders = Seq("ExecutorID", "Worker", "Cores", "Memory", "State", "Logs")
    val allExecutors = (app.executors.values ++ app.removedExecutors).toSet.toSeq
    // This includes executors that are either still running or have exited cleanly
    val executors = allExecutors.filter { exec =>
      !ExecutorState.isFinished(exec.state) || exec.state == ExecutorState.EXITED
    }
    val removedExecutors = allExecutors.diff(executors)
    val executorsTable = UIUtils.listingTable(executorHeaders, executorRow, executors)
    val removedExecutorsTable = UIUtils.listingTable(executorHeaders, executorRow, removedExecutors)

    val content =
      <div class="row-fluid">
        <div class="span12">
          <ul class="unstyled">
            <li><strong>ID:</strong> {app.id}</li>
            <li><strong>Name:</strong> {app.desc.name}</li>
            <li><strong>User:</strong> {app.desc.user}</li>
            <li><strong>Cores:</strong>
            {
              if (app.desc.maxCores.isEmpty) {
                "Unlimited (%s granted)".format(app.coresGranted)
              } else {
                "%s (%s granted, %s left)".format(
                  app.desc.maxCores.get, app.coresGranted, app.coresLeft)
              }
            }
            </li>
            <li>
              <strong>Executor Memory:</strong>
              {Utils.megabytesToString(app.desc.memoryPerExecutorMB)}
            </li>
            <li><strong>Submit Date:</strong> {app.submitDate}</li>
            <li><strong>State:</strong> {app.state}</li>
            <li><strong><a href={app.curAppUIUrl}>Application Detail UI</a></strong></li>
          </ul>
        </div>
      </div>

      <div class="row-fluid"> <!-- Executors -->
        <div class="span12">
          <h4> Executor Summary </h4>
          {executorsTable}
          {
            if (removedExecutors.nonEmpty) {
              <h4> Removed Executors </h4> ++
              removedExecutorsTable
            }
          }
        </div>
      </div>;
    UIUtils.basicSparkPage(content, "Application: " + app.desc.name)
  }

  private def executorRow(executor: ExecutorDesc): Seq[Node] = {
    <tr>
      <td>{executor.id}</td>
      <td>
        <a href={executor.worker.webUiAddress}>{executor.worker.id}</a>
      </td>
      <td>{executor.cores}</td>
      <td>{executor.memory}</td>
      <td>{executor.state}</td>
      <td>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stdout"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stdout</a>
        <a href={"%s/logPage?appId=%s&executorId=%s&logType=stderr"
          .format(executor.worker.webUiAddress, executor.application.id, executor.id)}>stderr</a>
      </td>
    </tr>
  }
} 
Example 24
Source File: ExecutorThreadDumpPage.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) => {
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
        }
      }.map { thread =>
        val threadName = thread.threadName
        val className = "accordion-heading " + {
          if (threadName.contains("Executor task launch")) {
            "executor-thread"
          } else {
            "non-executor-thread"
          }
        }
        <div class="accordion-group">
          <div class={className} onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
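
The executorId handling above is a recurring idiom in these examples: read the raw query parameter, decode it with UIUtils.decodeURLParameter, and fail fast when it is absent. A small sketch of that idiom pulled into a helper; the object and method names are invented for illustration and assume a package under org.apache.spark.

package org.apache.spark.ui.sketch

import javax.servlet.http.HttpServletRequest

import org.apache.spark.ui.UIUtils

object RequestParams {
  // Decode a required query parameter (for example an executorId such as "<driver>"
  // that arrives URL-encoded) or reject the request with a clear error message.
  def required(request: HttpServletRequest, name: String): String = {
    Option(request.getParameter(name))
      .map(UIUtils.decodeURLParameter)
      .getOrElse(throw new IllegalArgumentException(s"Missing $name parameter"))
  }
}
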
Example 25
Source File: EnvironmentPage.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
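
EnvironmentPage is the clearest use of UIUtils.listingTable in this listing: a header Seq, a row function turning one element into a <tr>, and the data itself. A short sketch of the same call with assumed key/value data, keeping the fixedWidth = true argument seen above; the object name is a placeholder.

package org.apache.spark.ui.sketch

import scala.xml.Node

import org.apache.spark.ui.UIUtils

object PropertyTableSketch {
  private val headers = Seq("Name", "Value")

  // One row per (name, value) pair; listingTable applies this to every element.
  private def row(kv: (String, String)): Seq[Node] =
    <tr><td>{kv._1}</td><td>{kv._2}</td></tr>

  def render(props: Seq[(String, String)]): Seq[Node] =
    UIUtils.listingTable(headers, row, props, fixedWidth = true)
}
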
Example 26
Source File: PoolTable.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.ui.jobs

import java.net.URLEncoder

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.progressListener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), URLEncoder.encode(p.name, "UTF-8"))
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
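
PoolTable shows the link-building side of UIUtils: prependBaseUri keeps URLs valid when the UI is served under a proxy prefix, and the pool name is URL-encoded because fair-scheduler pool names are user-chosen. A one-method sketch of the same construction; the object name and basePath parameter are illustrative.

package org.apache.spark.ui.sketch

import java.net.URLEncoder

import org.apache.spark.ui.UIUtils

object PoolLinks {
  // basePath comes from the parent tab; the pool name may contain spaces or
  // reserved characters, so it is encoded before being placed in the query string.
  def poolHref(basePath: String, poolName: String): String =
    "%s/stages/pool?poolname=%s".format(
      UIUtils.prependBaseUri(basePath),
      URLEncoder.encode(poolName, "UTF-8"))
}
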
Example 27
Source File: PoolPage.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.progressListener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
} 
Example 28
Source File: HistoryNotFoundPage.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[spark] class HistoryNotFoundPage(parent: MasterWebUI)
  extends WebUIPage("history/not-found") {

  
  def render(request: HttpServletRequest): Seq[Node] = {
    val titleParam = request.getParameter("title")
    val msgParam = request.getParameter("msg")
    val exceptionParam = request.getParameter("exception")

    // If no parameters are specified, assume the user did not enable event logging
    val defaultTitle = "Event logging is not enabled"
    val defaultContent =
      <div class="row-fluid">
        <div class="span12" style="font-size:14px">
          No event logs were found for this application! To
          <a href="http://spark.apache.org/docs/latest/monitoring.html">enable event logging</a>,
          set <span style="font-style:italic">spark.eventLog.enabled</span> to true and
          <span style="font-style:italic">spark.eventLog.dir</span> to the directory to which your
          event logs are written.
        </div>
      </div>

    val title = Option(titleParam).getOrElse(defaultTitle)
    val content = Option(msgParam)
      .map { msg => URLDecoder.decode(msg, "UTF-8") }
      .map { msg =>
        <div class="row-fluid">
          <div class="span12" style="font-size:14px">{msg}</div>
        </div> ++
        Option(exceptionParam)
          .map { e => URLDecoder.decode(e, "UTF-8") }
          .map { e => <pre>{e}</pre> }
          .getOrElse(Seq.empty)
      }.getOrElse(defaultContent)

    UIUtils.basicSparkPage(content, title)
  }
} 
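
HistoryNotFoundPage illustrates defaulting: the optional, URL-encoded msg and exception parameters are decoded when present, and a canned explanation is rendered otherwise. A compact sketch of that Option pipeline for the message alone; it is a plain-Scala illustration, not Spark API.

package org.apache.spark.ui.sketch

import java.net.URLDecoder

import scala.xml.Node

object OptionalMessage {
  // Decode the message if it was supplied, otherwise fall back to the default body.
  def contentFor(msgParam: String, default: Seq[Node]): Seq[Node] =
    Option(msgParam)
      .map(msg => URLDecoder.decode(msg, "UTF-8"))
      .map(msg => <div class="row-fluid"><div class="span12">{msg}</div></div>)
      .getOrElse(default)
}
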
Example 29
Source File: ExecutorsPage.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ExecutorSummary
import org.apache.spark.ui.{UIUtils, WebUIPage}

// This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
private[ui] case class ExecutorSummaryInfo(
    id: String,
    hostPort: String,
    rddBlocks: Int,
    memoryUsed: Long,
    diskUsed: Long,
    activeTasks: Int,
    failedTasks: Int,
    completedTasks: Int,
    totalTasks: Int,
    totalDuration: Long,
    totalInputBytes: Long,
    totalShuffleRead: Long,
    totalShuffleWrite: Long,
    maxMemory: Long,
    executorLogs: Map[String, String])


private[ui] class ExecutorsPage(
    parent: ExecutorsTab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>;

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
}

private[spark] object ExecutorsPage {
  
  def getExecInfo(
      listener: ExecutorsListener,
      statusId: Int,
      isActive: Boolean): ExecutorSummary = {
    val status = if (isActive) {
      listener.activeStorageStatusList(statusId)
    } else {
      listener.deadStorageStatusList(statusId)
    }
    val execId = status.blockManagerId.executorId
    val hostPort = status.blockManagerId.hostPort
    val rddBlocks = status.numBlocks
    val memUsed = status.memUsed
    val maxMem = status.maxMem
    val diskUsed = status.diskUsed
    val taskSummary = listener.executorToTaskSummary.getOrElse(execId, ExecutorTaskSummary(execId))

    new ExecutorSummary(
      execId,
      hostPort,
      isActive,
      rddBlocks,
      memUsed,
      diskUsed,
      taskSummary.totalCores,
      taskSummary.tasksMax,
      taskSummary.tasksActive,
      taskSummary.tasksFailed,
      taskSummary.tasksComplete,
      taskSummary.tasksActive + taskSummary.tasksFailed + taskSummary.tasksComplete,
      taskSummary.duration,
      taskSummary.jvmGCTime,
      taskSummary.inputBytes,
      taskSummary.shuffleRead,
      taskSummary.shuffleWrite,
      maxMem,
      taskSummary.executorLogs
    )
  }
} 
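
This ExecutorsPage is rendered almost entirely in the browser: the server emits an empty container plus the scripts that populate it, every src goes through prependBaseUri so static assets resolve behind a proxy, and headerSparkPage is asked to include the DataTables assets. A sketch of that client-rendered shape; the class name and element id are placeholders, and the script paths are simply the ones used above.

package org.apache.spark.ui.sketch

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUITab, UIUtils, WebUIPage}

// Hypothetical client-rendered page: the table body is filled in by JavaScript.
private[spark] class ClientRenderedPage(parent: SparkUITab) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="client-table"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script>
        }
      </div>
    UIUtils.headerSparkPage("Client Rendered", content, parent, useDataTables = true)
  }
}
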
Example 30
Source File: ExecutorThreadDumpPage.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

    <div class="row-fluid">
      <p>Updated at {UIUtils.formatDate(time)}</p>
      {
        // scalastyle:off
        <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
          Expand All
        </a></p>
        <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
          Collapse All
        </a></p>
        <div class="form-inline">
        <div class="bs-example" data-example-id="simple-form-inline">
          <div class="form-group">
            <div class="input-group">
              Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input>
            </div>
          </div>
        </div>
        </div>
        <p></p>
        // scalastyle:on
      }
      <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
        <thead>
          <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
        </thead>
        <tbody>{dumpRows}</tbody>
      </table>
    </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
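
Unlike the accordion-based variant in Example 24, this page builds its table markup by hand and composes the CSS classes from UIUtils.TABLE_CLASS_STRIPED plus the accordion and sortable classes its JavaScript expects. A minimal sketch of composing the class attribute the same way; the header columns and object name are assumptions.

package org.apache.spark.ui.sketch

import scala.xml.Node

import org.apache.spark.ui.UIUtils

object StripedTableSketch {
  // Hand-built table reusing Spark's striped-table CSS class; "sortable" enables
  // client-side column sorting, as in the thread-dump page above.
  def table(rows: Seq[Node]): Seq[Node] =
    <table class={UIUtils.TABLE_CLASS_STRIPED + " sortable"}>
      <thead>
        <th>Thread ID</th>
        <th>Thread Name</th>
      </thead>
      <tbody>{rows}</tbody>
    </table>
}
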
Example 31
Source File: EnvironmentPage.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  private def removePass(kv: (String, String)): (String, String) = {
    if (kv._1.toLowerCase.contains("password") || kv._1.toLowerCase.contains("secret")) {
      (kv._1, "******")
    } else kv
  }

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties.map(removePass), fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
Example 32
Source File: PoolTable.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.ui.jobs

import java.net.URLEncoder

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.progressListener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), URLEncoder.encode(p.name, "UTF-8"))
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
Example 33
Source File: PoolPage.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val shouldShowActiveStages = activeStages.nonEmpty
      val activeStagesTable =
        new StageTableBase(request, activeStages, "", "activeStage", parent.basePath, "stages/pool",
          parent.progressListener, parent.isFairScheduler, parent.killEnabled,
          isFailedStage = false)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
      if (shouldShowActiveStages) {
        content ++= <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq
      }

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
} 
Example 34
Source File: ExecutorNumTab.scala    From XSQL   with Apache License 2.0
package org.apache.spark.monitor

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils, WebUIPage}

private class ExecutorNumTab(parent: SparkUI) extends SparkUITab(parent, "resources") {

  init()

  private def init(): Unit = {
    attachPage(new ExecutorNumPage(this))
  }

}

private class ExecutorNumPage(parent: SparkUITab) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
        <div id ="echart-container" class="row-fluid" style="height: 600px"></div> ++
        <script type="text/javascript"
                src="http://echarts.baidu.com/gallery/vendors/echarts/echarts.min.js"></script> ++
        <script src={UIUtils.prependBaseUri(
          request, "/static/special/executornumpage.js")}></script>
        }
      </div>

    UIUtils.headerSparkPage(request, "ExecutorNumCurve", content, parent, useDataTables = false)
  }
} 
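
ExecutorNumTab is the one example in this listing that also shows how a page becomes reachable: a SparkUITab subclass attaches the page, and the tab's prefix ("resources" above) becomes the URL under which render is served. A sketch of that wiring with placeholder names for the tab, page and prefix.

package org.apache.spark.ui.sketch

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{SparkUI, SparkUITab, UIUtils, WebUIPage}

// Hypothetical tab: attachPage is what exposes SketchPage under the "sketch" prefix.
private class SketchTab(parent: SparkUI) extends SparkUITab(parent, "sketch") {
  attachPage(new SketchPage(this))
}

private class SketchPage(parent: SparkUITab) extends WebUIPage("") {
  def render(request: HttpServletRequest): Seq[Node] = {
    val content = <div>Placeholder content for the sketch tab.</div>
    UIUtils.headerSparkPage("Sketch", content, parent)
  }
}
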
Example 35
Source File: FiberCacheManagerPage.scala    From OAP   with Apache License 2.0
package org.apache.spark.sql.oap.ui

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.internal.Logging
import org.apache.spark.sql.execution.datasources.oap.filecache.CacheStats
import org.apache.spark.sql.oap.OapRuntime
import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class FiberCacheManagerPage(parent: OapTab) extends WebUIPage("") with Logging {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
        <div id="active-cms"></div> ++
          <script src={UIUtils.prependBaseUri(request, parent.basePath,
            "/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri(request, parent.basePath,
            "/static/oap/oap.js")}></script>
        }
      </div>

    UIUtils.headerSparkPage(request, "FiberCacheManager", content, parent, useDataTables = true)
  }

}


class FiberCacheManagerSummary private[spark](
    val id: String,
    val hostPort: String,
    val isActive: Boolean,
    val indexDataCacheSeparationEnable: Boolean,
    val memoryUsed: Long,
    val maxMemory: Long,
    val cacheSize: Long,
    val cacheCount: Long,
    val backendCacheSize: Long,
    val backendCacheCount: Long,
    val dataFiberSize: Long,
    val dataFiberCount: Long,
    val indexFiberSize: Long,
    val indexFiberCount: Long,
    val pendingFiberSize: Long,
    val pendingFiberCount: Long,
    val dataFiberHitCount: Long,
    val dataFiberMissCount: Long,
    val dataFiberLoadCount: Long,
    val dataTotalLoadTime: Long,
    val dataEvictionCount: Long,
    val indexFiberHitCount: Long,
    val indexFiberMissCount: Long,
    val indexFiberLoadCount: Long,
    val indexTotalLoadTime: Long,
    val indexEvictionCount: Long) 
Example 36
Source File: HistoryPage.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val requestedIncomplete =
      Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList().count(_.completed != requestedIncomplete)
    val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess()
    val lastUpdatedTime = parent.getLastUpdatedTime()
    val providerConfig = parent.getProviderConfig()
    val content =
      <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script>
      <div>
          <div class="span12">
            <ul class="unstyled">
              {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
            </ul>
            {
            if (eventLogsUnderProcessCount > 0) {
              <p>There are {eventLogsUnderProcessCount} event log(s) currently being
                processed which may result in additional applications getting listed on this page.
                Refresh the page to view updates. </p>
            }
            }

            {
            if (lastUpdatedTime > 0) {
              <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p>
            }
            }

            {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
                <div id="history-summary" class="span12 pagination"></div> ++
                <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
                <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
                <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else if (eventLogsUnderProcessCount > 0) {
              <h4>No completed applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
            }

            <a href={makePageLink(!requestedIncomplete)}>
              {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
              }
            </a>
          </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }
} 
Example 37
Source File: ExecutorsPage.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ExecutorSummary
import org.apache.spark.ui.{UIUtils, WebUIPage}

// This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
private[ui] case class ExecutorSummaryInfo(
    id: String,
    hostPort: String,
    rddBlocks: Int,
    memoryUsed: Long,
    diskUsed: Long,
    activeTasks: Int,
    failedTasks: Int,
    completedTasks: Int,
    totalTasks: Int,
    totalDuration: Long,
    totalInputBytes: Long,
    totalShuffleRead: Long,
    totalShuffleWrite: Long,
    maxMemory: Long,
    executorLogs: Map[String, String])


private[ui] class ExecutorsPage(
    parent: ExecutorsTab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js")}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>;

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
}

private[spark] object ExecutorsPage {
  
  def getExecInfo(
      listener: ExecutorsListener,
      statusId: Int,
      isActive: Boolean): ExecutorSummary = {
    val status = if (isActive) {
      listener.activeStorageStatusList(statusId)
    } else {
      listener.deadStorageStatusList(statusId)
    }
    val execId = status.blockManagerId.executorId
    val hostPort = status.blockManagerId.hostPort
    val rddBlocks = status.numBlocks
    val memUsed = status.memUsed
    val maxMem = status.maxMem
    val diskUsed = status.diskUsed
    val taskSummary = listener.executorToTaskSummary.getOrElse(execId, ExecutorTaskSummary(execId))

    new ExecutorSummary(
      execId,
      hostPort,
      isActive,
      rddBlocks,
      memUsed,
      diskUsed,
      taskSummary.totalCores,
      taskSummary.tasksMax,
      taskSummary.tasksActive,
      taskSummary.tasksFailed,
      taskSummary.tasksComplete,
      taskSummary.tasksActive + taskSummary.tasksFailed + taskSummary.tasksComplete,
      taskSummary.duration,
      taskSummary.jvmGCTime,
      taskSummary.inputBytes,
      taskSummary.shuffleRead,
      taskSummary.shuffleWrite,
      maxMem,
      taskSummary.executorLogs
    )
  }
} 
Example 38
Source File: ExecutorThreadDumpPage.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        val blockedBy = thread.blockedByThreadId match {
          case Some(blockedByThreadId) =>
            <div>
              Blocked by <a href={s"#${thread.blockedByThreadId}_td_id"}>
              Thread {thread.blockedByThreadId} {thread.blockedByLock}</a>
            </div>
          case None => Text("")
        }
        val heldLocks = thread.holdingLocks.mkString(", ")

        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_locking"}>{blockedBy}{heldLocks}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

    <div class="row-fluid">
      <p>Updated at {UIUtils.formatDate(time)}</p>
      {
        // scalastyle:off
        <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
          Expand All
        </a></p>
        <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
          Collapse All
        </a></p>
        <div class="form-inline">
        <div class="bs-example" data-example-id="simple-form-inline">
          <div class="form-group">
            <div class="input-group">
              Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input>
            </div>
          </div>
        </div>
        </div>
        <p></p>
        // scalastyle:on
      }
      <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
        <thead>
          <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Locks</th>
        </thead>
        <tbody>{dumpRows}</tbody>
      </table>
    </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
Example 39
Source File: EnvironmentPage.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  private def removePass(kv: (String, String)): (String, String) = {
    if (kv._1.toLowerCase.contains("password") || kv._1.toLowerCase.contains("secret")) {
      (kv._1, "******")
    } else kv
  }

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties.map(removePass), fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
Example 40
Source File: PoolTable.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.ui.jobs

import java.net.URLEncoder

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.progressListener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), URLEncoder.encode(p.name, "UTF-8"))
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
Example 41
Source File: PoolPage.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val shouldShowActiveStages = activeStages.nonEmpty
      val activeStagesTable =
        new StageTableBase(request, activeStages, "", "activeStage", parent.basePath, "stages/pool",
          parent.progressListener, parent.isFairScheduler, parent.killEnabled,
          isFailedStage = false)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
      if (shouldShowActiveStages) {
        content ++= <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq
      }

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
} 
Example 42
Source File: HistoryPage.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val requestedIncomplete =
      Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList().count(_.completed != requestedIncomplete)
    val providerConfig = parent.getProviderConfig()
    val content =
      <div>
          <div class="span12">
            <ul class="unstyled">
              {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
            </ul>
            {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
                <div id="history-summary" class="span12 pagination"></div> ++
                <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
                <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
                <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
            }

            <a href={makePageLink(!requestedIncomplete)}>
              {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
              }
            </a>
          </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }
} 
Example 43
Source File: ExecutorThreadDumpPage.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.ui.exec

import java.net.URLDecoder
import javax.servlet.http.HttpServletRequest

import scala.util.Try
import scala.xml.{Text, Node}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map {
      executorId =>
        // Due to YARN-2844, "<driver>" in the url will be encoded to "%25253Cdriver%25253E" when
        // running in yarn-cluster mode. `request.getParameter("executorId")` will return
        // "%253Cdriver%253E". Therefore we need to decode it until we get the real id.
        var id = executorId
        var decodedId = URLDecoder.decode(id, "UTF-8")
        while (id != decodedId) {
          id = decodedId
          decodedId = URLDecoder.decode(id, "UTF-8")
        }
        id
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
      val dumpRows = threadDump.map { thread =>
        <div class="accordion-group">
          <div class="accordion-heading" onclick="$(this).next().toggleClass('hidden')">
            <a class="accordion-toggle">
              Thread {thread.threadId}: {thread.threadName} ({thread.threadState})
            </a>
          </div>
          <div class="accordion-body hidden">
            <div class="accordion-inner">
              <pre>{thread.stackTrace}</pre>
            </div>
          </div>
        </div>
      }

      <div class="row-fluid">
        <p>Updated at {UIUtils.formatDate(time)}</p>
        {
          // scalastyle:off
          <p><a class="expandbutton"
                onClick="$('.accordion-body').removeClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Expand All
          </a></p>
          <p><a class="expandbutton hidden"
                onClick="$('.accordion-body').addClass('hidden'); $('.expandbutton').toggleClass('hidden')">
            Collapse All
          </a></p>
          // scalastyle:on
        }
        <div class="accordion">{dumpRows}</div>
      </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
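
The while loop above is the workaround for YARN-2844: the executor id can arrive double- (or deeper-) encoded, so it is decoded repeatedly until the value stops changing. The same idea as a small recursive helper; the name is illustrative and the helper is plain Scala, not part of UIUtils.

package org.apache.spark.ui.sketch

import java.net.URLDecoder

import scala.annotation.tailrec

object FullyDecode {
  // Keep decoding until a fixed point is reached, so "%253Cdriver%253E"
  // eventually becomes "<driver>" no matter how many times it was encoded.
  @tailrec
  def apply(encoded: String): String = {
    val decoded = URLDecoder.decode(encoded, "UTF-8")
    if (decoded == encoded) encoded else apply(decoded)
  }
}
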
Example 44
Source File: EnvironmentPage.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties, fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
Example 45
Source File: StoragePage.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.ui.storage

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.storage.RDDInfo
import org.apache.spark.ui.{WebUIPage, UIUtils}
import org.apache.spark.util.Utils


  private def rddRow(rdd: RDDInfo): Seq[Node] = {
    // scalastyle:off
    <tr>
      <td>
        <a href={"%s/storage/rdd?id=%s".format(UIUtils.prependBaseUri(parent.basePath), rdd.id)}>
          {rdd.name}
        </a>
      </td>
      <td>{rdd.storageLevel.description}
      </td>
      <td>{rdd.numCachedPartitions}</td>
      <td>{"%.0f%%".format(rdd.numCachedPartitions * 100.0 / rdd.numPartitions)}</td>
      <td sorttable_customkey={rdd.memSize.toString}>{Utils.bytesToString(rdd.memSize)}</td>
      <td sorttable_customkey={rdd.tachyonSize.toString}>{Utils.bytesToString(rdd.tachyonSize)}</td>
      <td sorttable_customkey={rdd.diskSize.toString} >{Utils.bytesToString(rdd.diskSize)}</td>
    </tr>
    // scalastyle:on
  }
} 
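
The rddRow helper relies on the sorttable_customkey attribute: each size cell shows a human-readable value from Utils.bytesToString while the attribute carries the raw byte count, so client-side sorting orders rows numerically rather than lexically. A one-cell sketch of that pairing; the object name is a placeholder, and it assumes a package under org.apache.spark because Utils is private[spark].

package org.apache.spark.ui.sketch

import scala.xml.Node

import org.apache.spark.util.Utils

object SortableSizeCell {
  // Raw bytes in the sort key, pretty-printed bytes in the visible text.
  def cell(sizeInBytes: Long): Seq[Node] =
    <td sorttable_customkey={sizeInBytes.toString}>{Utils.bytesToString(sizeInBytes)}</td>
}
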
Example 46
Source File: PoolTable.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.ui.jobs

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.listener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(UIUtils.prependBaseUri(parent.basePath), p.name)
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
Example 47
Source File: PoolPage.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.{WebUIPage, UIUtils}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = request.getParameter("poolname")
      require(poolName != null && poolName.nonEmpty, "Missing poolname parameter")

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val activeStagesTable = new StageTableBase(activeStages.sortBy(_.submissionTime).reverse,
        parent.basePath, parent.listener, isFairScheduler = parent.isFairScheduler,
        killEnabled = parent.killEnabled)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).get).toSeq
      val poolTable = new PoolTable(pools, parent)

      val content =
        <h4>Summary </h4> ++ poolTable.toNodeSeq ++
        <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
} 
Example 48
Source File: DruidQueriesPage.scala    From spark-druid-olap   with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.sparklinedata.ui

import javax.servlet.http.HttpServletRequest
import org.apache.spark.sql.SPLLogging
import org.apache.spark.ui.{UIUtils, WebUIPage}
import org.sparklinedata.druid.metadata.{DruidQueryExecutionView, DruidQueryHistory}
import scala.xml.Node


private[ui] class DruidQueriesPage(parent: DruidQueriesTab) extends WebUIPage("") with SPLLogging {

  def render(request: HttpServletRequest): Seq[Node] = {
    val content = generateDruidStatsTable()
    UIUtils.headerSparkPage("Druid Query Details", content, parent, Some(5000))
  }

  private def generateDruidStatsTable(): Seq[Node] = {
    val numStatement = DruidQueryHistory.getHistory.size
    val table = if (numStatement > 0) {
      val headerRow = Seq("stageId", "partitionId", "taskAttemptId", "druidQueryServer",
        "druidSegIntervals", "startTime", "druidExecTime", "queryExecTime", "numRows",
        "druidQuery", "sql")
      val druidContent = DruidQueryHistory.getHistory
      def generateDataRow(info: DruidQueryExecutionView): Seq[Node] = {
        var interval = ""
        for(temp <- info.druidSegIntervals){
          interval += temp
        }
        val stageLink = "%s/stages/stage?id=%s&attempt=0"
          .format(UIUtils.prependBaseUri(parent.basePath), info.stageId)
        <tr>
          <td><a href={stageLink}> {info.stageId} </a></td>
          <td>
            {info.partitionId}
          </td>
          <td>{info.taskAttemptId}</td>
          <td>{info.druidQueryServer}</td>
          <td>{interval}</td>
          <td>{info.startTime}</td>
          <td>{info.druidExecTime}</td>
          <td>{info.queryExecTime}</td>
          <td>{info.numRows}</td>
          <td>{info.druidQuery}</td>
          <td>{info.sqlStmt.getOrElse("none")}</td>
        </tr>
      }
      Some(UIUtils.listingTable(headerRow, generateDataRow,
        druidContent, false, None, Seq(null), false))
    } else {
      None
    }
    val content =
      <h5 id="sqlstat">Druid Query Details</h5> ++
        <div>
          <ul class="unstyled">
            {table.getOrElse("No queries have been executed yet.")}
          </ul>
        </div>
    content
  }
} 
Example 49
Source File: HistoryPage.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.deploy.history

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[history] class HistoryPage(parent: HistoryServer) extends WebUIPage("") {

  def render(request: HttpServletRequest): Seq[Node] = {
    val requestedIncomplete =
      Option(request.getParameter("showIncomplete")).getOrElse("false").toBoolean

    val allAppsSize = parent.getApplicationList().count(_.completed != requestedIncomplete)
    val eventLogsUnderProcessCount = parent.getEventLogsUnderProcess()
    val lastUpdatedTime = parent.getLastUpdatedTime()
    val providerConfig = parent.getProviderConfig()
    val content =
      <script src={UIUtils.prependBaseUri("/static/historypage-common.js")}></script>
      <div>
          <div class="span12">
            <ul class="unstyled">
              {providerConfig.map { case (k, v) => <li><strong>{k}:</strong> {v}</li> }}
            </ul>
            {
            if (eventLogsUnderProcessCount > 0) {
              <p>There are {eventLogsUnderProcessCount} event log(s) currently being
                processed which may result in additional applications getting listed on this page.
                Refresh the page to view updates. </p>
            }
            }

            {
            if (lastUpdatedTime > 0) {
              <p>Last updated: <span id="last-updated">{lastUpdatedTime}</span></p>
            }
            }

            {
            if (allAppsSize > 0) {
              <script src={UIUtils.prependBaseUri("/static/dataTables.rowsGroup.js")}></script> ++
                <div id="history-summary" class="span12 pagination"></div> ++
                <script src={UIUtils.prependBaseUri("/static/utils.js")}></script> ++
                <script src={UIUtils.prependBaseUri("/static/historypage.js")}></script> ++
                <script>setAppLimit({parent.maxApplications})</script>
            } else if (requestedIncomplete) {
              <h4>No incomplete applications found!</h4>
            } else if (eventLogsUnderProcessCount > 0) {
              <h4>No completed applications found!</h4>
            } else {
              <h4>No completed applications found!</h4> ++ parent.emptyListingHtml
            }
            }

            <a href={makePageLink(!requestedIncomplete)}>
              {
              if (requestedIncomplete) {
                "Back to completed applications"
              } else {
                "Show incomplete applications"
              }
              }
            </a>
          </div>
      </div>
    UIUtils.basicSparkPage(content, "History Server", true)
  }

  private def makePageLink(showIncomplete: Boolean): String = {
    UIUtils.prependBaseUri("/?" + "showIncomplete=" + showIncomplete)
  }
} 
Example 50
Source File: ExecutorsPage.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.status.api.v1.ExecutorSummary
import org.apache.spark.ui.{UIUtils, WebUIPage}

// This isn't even used anymore -- but we need to keep it b/c of a MiMa false positive
private[ui] case class ExecutorSummaryInfo(
    id: String,
    hostPort: String,
    rddBlocks: Int,
    memoryUsed: Long,
    diskUsed: Long,
    activeTasks: Int,
    failedTasks: Int,
    completedTasks: Int,
    totalTasks: Int,
    totalDuration: Long,
    totalInputBytes: Long,
    totalShuffleRead: Long,
    totalShuffleWrite: Long,
    maxMemory: Long,
    executorLogs: Map[String, String])


private[ui] class ExecutorsPage(
    parent: ExecutorsTab,
    threadDumpEnabled: Boolean)
  extends WebUIPage("") {
  private val listener = parent.listener

  def render(request: HttpServletRequest): Seq[Node] = {
    val content =
      <div>
        {
          <div id="active-executors"></div> ++
          <script src={UIUtils.prependBaseUri("/static/utils.js",
            sparkUser = parent.sparkUser)}></script> ++
          <script src={UIUtils.prependBaseUri("/static/executorspage.js",
            sparkUser = parent.sparkUser)}></script> ++
          <script>setThreadDumpEnabled({threadDumpEnabled})</script>
        }
      </div>;

    UIUtils.headerSparkPage("Executors", content, parent, useDataTables = true)
  }
}

private[spark] object ExecutorsPage {
  
  def getExecInfo(
      listener: ExecutorsListener,
      statusId: Int,
      isActive: Boolean): ExecutorSummary = {
    val status = if (isActive) {
      listener.activeStorageStatusList(statusId)
    } else {
      listener.deadStorageStatusList(statusId)
    }
    val execId = status.blockManagerId.executorId
    val hostPort = status.blockManagerId.hostPort
    val rddBlocks = status.numBlocks
    val memUsed = status.memUsed
    val maxMem = status.maxMem
    val diskUsed = status.diskUsed
    val taskSummary = listener.executorToTaskSummary.getOrElse(execId, ExecutorTaskSummary(execId))

    new ExecutorSummary(
      execId,
      hostPort,
      isActive,
      rddBlocks,
      memUsed,
      diskUsed,
      taskSummary.totalCores,
      taskSummary.tasksMax,
      taskSummary.tasksActive,
      taskSummary.tasksFailed,
      taskSummary.tasksComplete,
      taskSummary.tasksActive + taskSummary.tasksFailed + taskSummary.tasksComplete,
      taskSummary.duration,
      taskSummary.jvmGCTime,
      taskSummary.inputBytes,
      taskSummary.shuffleRead,
      taskSummary.shuffleWrite,
      maxMem,
      taskSummary.executorLogs
    )
  }
} 
Example 51
Source File: ExecutorThreadDumpPage.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.ui.exec

import javax.servlet.http.HttpServletRequest

import scala.xml.{Node, Text}

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class ExecutorThreadDumpPage(parent: ExecutorsTab) extends WebUIPage("threadDump") {

  private val sc = parent.sc

  def render(request: HttpServletRequest): Seq[Node] = {
    val executorId = Option(request.getParameter("executorId")).map { executorId =>
      UIUtils.decodeURLParameter(executorId)
    }.getOrElse {
      throw new IllegalArgumentException(s"Missing executorId parameter")
    }
    val time = System.currentTimeMillis()
    val maybeThreadDump = sc.get.getExecutorThreadDump(executorId)

    val content = maybeThreadDump.map { threadDump =>
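      // List "Executor task launch" threads first; within each group, order
      // threads by lower-cased thread name.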
      val dumpRows = threadDump.sortWith {
        case (threadTrace1, threadTrace2) =>
          val v1 = if (threadTrace1.threadName.contains("Executor task launch")) 1 else 0
          val v2 = if (threadTrace2.threadName.contains("Executor task launch")) 1 else 0
          if (v1 == v2) {
            threadTrace1.threadName.toLowerCase < threadTrace2.threadName.toLowerCase
          } else {
            v1 > v2
          }
      }.map { thread =>
        val threadId = thread.threadId
        val blockedBy = thread.blockedByThreadId match {
          case Some(blockedByThreadId) =>
            <div>
              Blocked by <a href={s"#${thread.blockedByThreadId}_td_id"}>
              Thread {thread.blockedByThreadId} {thread.blockedByLock}</a>
            </div>
          case None => Text("")
        }
        val heldLocks = thread.holdingLocks.mkString(", ")

        <tr id={s"thread_${threadId}_tr"} class="accordion-heading"
            onclick={s"toggleThreadStackTrace($threadId, false)"}
            onmouseover={s"onMouseOverAndOut($threadId)"}
            onmouseout={s"onMouseOverAndOut($threadId)"}>
          <td id={s"${threadId}_td_id"}>{threadId}</td>
          <td id={s"${threadId}_td_name"}>{thread.threadName}</td>
          <td id={s"${threadId}_td_state"}>{thread.threadState}</td>
          <td id={s"${threadId}_td_locking"}>{blockedBy}{heldLocks}</td>
          <td id={s"${threadId}_td_stacktrace"} class="hidden">{thread.stackTrace}</td>
        </tr>
      }

    <div class="row-fluid">
      <p>Updated at {UIUtils.formatDate(time)}</p>
      {
        // scalastyle:off
        <p><a class="expandbutton" onClick="expandAllThreadStackTrace(true)">
          Expand All
        </a></p>
        <p><a class="expandbutton hidden" onClick="collapseAllThreadStackTrace(true)">
          Collapse All
        </a></p>
        <div class="form-inline">
        <div class="bs-example" data-example-id="simple-form-inline">
          <div class="form-group">
            <div class="input-group">
              Search: <input type="text" class="form-control" id="search" oninput="onSearchStringChange()"></input>
            </div>
          </div>
        </div>
        </div>
        <p></p>
        // scalastyle:on
      }
      <table class={UIUtils.TABLE_CLASS_STRIPED + " accordion-group" + " sortable"}>
        <thead>
          <th onClick="collapseAllThreadStackTrace(false)">Thread ID</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Name</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread State</th>
          <th onClick="collapseAllThreadStackTrace(false)">Thread Locks</th>
        </thead>
        <tbody>{dumpRows}</tbody>
      </table>
    </div>
    }.getOrElse(Text("Error fetching thread dump"))
    UIUtils.headerSparkPage(s"Thread dump for executor $executorId", content, parent)
  }
} 
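Three UIUtils helpers do the work here: decodeURLParameter decodes the executorId query parameter, formatDate renders the capture time in the UI's standard format, and TABLE_CLASS_STRIPED supplies the shared table CSS classes. A minimal sketch of the parameter handling, assuming an HttpServletRequest named request is in scope:

// Decode the URL-encoded executorId parameter, failing fast if it is missing.
val executorId = Option(request.getParameter("executorId"))
  .map(UIUtils.decodeURLParameter)
  .getOrElse(throw new IllegalArgumentException("Missing executorId parameter"))
// Render the capture time with the UI's standard date formatting.
val updatedAt = UIUtils.formatDate(System.currentTimeMillis())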
Example 52
Source File: EnvironmentPage.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ui.env

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.ui.{UIUtils, WebUIPage}

private[ui] class EnvironmentPage(parent: EnvironmentTab) extends WebUIPage("") {
  private val listener = parent.listener

  private def removePass(kv: (String, String)): (String, String) = {
    if (kv._1.toLowerCase.contains("password") || kv._1.toLowerCase.contains("secret")) {
      (kv._1, "******")
    } else kv
  }

  def render(request: HttpServletRequest): Seq[Node] = {
    val runtimeInformationTable = UIUtils.listingTable(
      propertyHeader, jvmRow, listener.jvmInformation, fixedWidth = true)
    val sparkPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.sparkProperties.map(removePass), fixedWidth = true)
    val systemPropertiesTable = UIUtils.listingTable(
      propertyHeader, propertyRow, listener.systemProperties, fixedWidth = true)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeaders, classPathRow, listener.classpathEntries, fixedWidth = true)
    val content =
      <span>
        <h4>Runtime Information</h4> {runtimeInformationTable}
        <h4>Spark Properties</h4> {sparkPropertiesTable}
        <h4>System Properties</h4> {systemPropertiesTable}
        <h4>Classpath Entries</h4> {classpathEntriesTable}
      </span>

    UIUtils.headerSparkPage("Environment", content, parent)
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeaders = Seq("Resource", "Source")
  private def jvmRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
} 
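The call to highlight is UIUtils.listingTable, which pairs a header sequence with a row-rendering function and the data and returns the table markup; fixedWidth = true gives all columns equal width. A minimal sketch with hypothetical data, assuming scala.xml.Node is imported as in the example above:

// Hypothetical key/value pairs, only to illustrate the call shape.
val props = Seq("spark.master" -> "local[*]", "spark.app.name" -> "demo")
def propertyRow(kv: (String, String)): Seq[Node] = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
val table = UIUtils.listingTable(Seq("Name", "Value"), propertyRow, props, fixedWidth = true)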
Example 53
Source File: PoolTable.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ui.jobs

import java.net.URLEncoder

import scala.collection.mutable.HashMap
import scala.xml.Node

import org.apache.spark.scheduler.{Schedulable, StageInfo}
import org.apache.spark.ui.UIUtils


private[ui] class PoolTable(pools: Seq[Schedulable], parent: StagesTab) {
  private val listener = parent.progressListener

  def toNodeSeq: Seq[Node] = {
    listener.synchronized {
      poolTable(poolRow, pools)
    }
  }

  private def poolTable(
      makeRow: (Schedulable, HashMap[String, HashMap[Int, StageInfo]]) => Seq[Node],
      rows: Seq[Schedulable]): Seq[Node] = {
    <table class="table table-bordered table-striped table-condensed sortable table-fixed">
      <thead>
        <th>Pool Name</th>
        <th>Minimum Share</th>
        <th>Pool Weight</th>
        <th>Active Stages</th>
        <th>Running Tasks</th>
        <th>SchedulingMode</th>
      </thead>
      <tbody>
        {rows.map(r => makeRow(r, listener.poolToActiveStages))}
      </tbody>
    </table>
  }

  private def poolRow(
      p: Schedulable,
      poolToActiveStages: HashMap[String, HashMap[Int, StageInfo]]): Seq[Node] = {
    val activeStages = poolToActiveStages.get(p.name) match {
      case Some(stages) => stages.size
      case None => 0
    }
    val href = "%s/stages/pool?poolname=%s"
      .format(
        UIUtils.prependBaseUri(parent.basePath, sparkUser = parent.sparkUser),
        URLEncoder.encode(p.name, "UTF-8"))
    <tr>
      <td>
        <a href={href}>{p.name}</a>
      </td>
      <td>{p.minShare}</td>
      <td>{p.weight}</td>
      <td>{activeStages}</td>
      <td>{p.runningTasks}</td>
      <td>{p.schedulingMode}</td>
    </tr>
  }
} 
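prependBaseUri is combined with java.net.URLEncoder here so the generated link respects the UI base path and survives pool names containing non-URL-safe characters. A minimal sketch of the same link construction (the pool name is hypothetical; sparkUser is specific to this fork):

// Encode the pool name since it may contain spaces or other unsafe characters.
val poolName = "production pool"
val href = "%s/stages/pool?poolname=%s".format(
  UIUtils.prependBaseUri(parent.basePath, sparkUser = parent.sparkUser),
  URLEncoder.encode(poolName, "UTF-8"))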
Example 54
Source File: PoolPage.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.ui.jobs

import javax.servlet.http.HttpServletRequest

import scala.xml.Node

import org.apache.spark.scheduler.StageInfo
import org.apache.spark.ui.{UIUtils, WebUIPage}


private[ui] class PoolPage(parent: StagesTab) extends WebUIPage("pool") {
  private val sc = parent.sc
  private val listener = parent.progressListener

  def render(request: HttpServletRequest): Seq[Node] = {
    listener.synchronized {
      val poolName = Option(request.getParameter("poolname")).map { poolname =>
        UIUtils.decodeURLParameter(poolname)
      }.getOrElse {
        throw new IllegalArgumentException(s"Missing poolname parameter")
      }

      val poolToActiveStages = listener.poolToActiveStages
      val activeStages = poolToActiveStages.get(poolName) match {
        case Some(s) => s.values.toSeq
        case None => Seq[StageInfo]()
      }
      val shouldShowActiveStages = activeStages.nonEmpty
      val activeStagesTable =
        new StageTableBase(request, activeStages, "", "activeStage", parent.basePath, "stages/pool",
          parent.progressListener, parent.isFairScheduler, parent.killEnabled,
          isFailedStage = false, parent.sparkUser)

      // For now, pool information is only accessible in live UIs
      val pools = sc.map(_.getPoolForName(poolName).getOrElse {
        throw new IllegalArgumentException(s"Unknown poolname: $poolName")
      }).toSeq
      val poolTable = new PoolTable(pools, parent)

      var content = <h4>Summary </h4> ++ poolTable.toNodeSeq
      if (shouldShowActiveStages) {
        content ++= <h4>{activeStages.size} Active Stages</h4> ++ activeStagesTable.toNodeSeq
      }

      UIUtils.headerSparkPage("Fair Scheduler Pool: " + poolName, content, parent)
    }
  }
}
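As in the other pages, the body is assembled by concatenating NodeSeq fragments with ++ and then handed to UIUtils.headerSparkPage, which contributes the title, tab navigation and shared stylesheets; the page itself only decides which tables (pool summary, active stages) to include.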