org.apache.spark.sql.hive.thriftserver.HiveThriftServer2 Scala Examples

The following examples show how to use org.apache.spark.sql.hive.thriftserver.HiveThriftServer2. Each example names the open-source project and source file it was taken from.
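Before the project-specific examples, here is a minimal sketch of the pattern most of them share: build a Hive-enabled SparkSession and start the Thrift (JDBC/ODBC) server in the same application with HiveThriftServer2.startWithContext. The object name, application name, and local master below are illustrative placeholders, not taken from any of the projects listed.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2

object MinimalThriftServer {
  def main(args: Array[String]): Unit = {
    // Hive support is required for the Thrift server's metastore-backed catalog.
    val spark = SparkSession.builder()
      .appName("MinimalThriftServer") // placeholder application name
      .master("local[*]")             // placeholder master for local testing
      .enableHiveSupport()
      .getOrCreate()

    // Start the JDBC/ODBC server in-process; clients connecting to it share
    // this session's temporary views and configuration.
    HiveThriftServer2.startWithContext(spark.sqlContext)
  }
}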
Example 1
Source File: ThriftServerTab.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.ui

import org.apache.spark.{SparkContext, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2
import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab._
import org.apache.spark.ui.{SparkUI, SparkUITab}


private[thriftserver] class ThriftServerTab(sparkContext: SparkContext)
  extends SparkUITab(getSparkUI(sparkContext), "sqlserver") with Logging {

  override val name = "JDBC/ODBC Server"

  val parent = getSparkUI(sparkContext)
  val listener = HiveThriftServer2.listener

  attachPage(new ThriftServerPage(this))
  attachPage(new ThriftServerSessionPage(this))
  parent.attachTab(this)

  def detach() {
    getSparkUI(sparkContext).detachTab(this)
  }
}

private[thriftserver] object ThriftServerTab {
  def getSparkUI(sparkContext: SparkContext): SparkUI = {
    sparkContext.ui.getOrElse {
      throw new SparkException("Parent SparkUI to attach this tab to not found!")
    }
  }
} 
Example 2
Source File: ThriftServerTab.scala    From XSQL   with Apache License 2.0
The code is identical to Example 1.
Example 3
Source File: ThriftServerTab.scala    From sparkoscope   with Apache License 2.0
The code is identical to Example 1.
Example 4
Source File: CarbonThriftServer.scala    From carbondata   with Apache License 2.0
package org.apache.carbondata.spark.thriftserver

import java.io.File

import org.apache.spark.SparkConf
import org.apache.spark.sql.{CarbonEnv, SparkSession}
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2
import org.slf4j.{Logger, LoggerFactory}

import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.spark.util.CarbonSparkUtil


object CarbonThriftServer {

  def main(args: Array[String]): Unit = {
    if (args.length != 0 && args.length != 3) {
      val logger: Logger = LoggerFactory.getLogger(this.getClass)
      logger.error("parameters: [access-key] [secret-key] [s3-endpoint]")
      // Exit non-zero to signal the usage error.
      System.exit(1)
    }
    val sparkConf = new SparkConf(loadDefaults = true)
    val builder = SparkSession
      .builder()
      .config(sparkConf)
      .appName("Carbon Thrift Server(uses CarbonExtensions)")
      .enableHiveSupport()
      .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
    configPropertiesFile(sparkConf, builder)
    if (args.length == 3) {
      builder.config(CarbonSparkUtil.getSparkConfForS3(args(0), args(1), args(2)))
    }
    val spark = builder.getOrCreate()
    CarbonEnv.getInstance(spark)
    waitingForSparkLaunch()
    HiveThriftServer2.startWithContext(spark.sqlContext)
  }

  private def waitingForSparkLaunch(): Unit = {
    val warmUpTime = CarbonProperties.getInstance().getProperty("carbon.spark.warmUpTime", "5000")
    try {
      Thread.sleep(Integer.parseInt(warmUpTime))
    } catch {
      case e: Exception =>
        val LOG = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
        LOG.error(s"Wrong value for carbon.spark.warmUpTime $warmUpTime " +
                  "Using default Value and proceeding")
        Thread.sleep(5000)
    }
  }

  private def configPropertiesFile(sparkConf: SparkConf, builder: SparkSession.Builder): Unit = {
    if (sparkConf.contains("carbon.properties.filepath")) {
      System.setProperty(
        "carbon.properties.filepath", sparkConf.get("carbon.properties.filepath"))
    } else {
      val sparkHome = System.getenv.get("SPARK_HOME")
      if (null != sparkHome) {
        val file = new File(sparkHome + '/' + "conf" + '/' + "carbon.properties")
        if (file.exists()) {
          builder.config("carbon.properties.filepath", file.getCanonicalPath)
          System.setProperty("carbon.properties.filepath", file.getCanonicalPath)
        }
      }
    }
  }
} 
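Once a server started this way is up, any HiveServer2-compatible JDBC client can query it. The following is a minimal sketch of such a client, assuming the server listens on the default HiveServer2 port 10000 on localhost and that the Hive JDBC driver is on the classpath; the object name, connection URL, and query are illustrative.

import java.sql.DriverManager

object ThriftServerClientSketch {
  def main(args: Array[String]): Unit = {
    // Default HiveServer2 endpoint; adjust to match the server's
    // hive.server2.thrift.bind.host / hive.server2.thrift.port settings.
    val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000", "", "")
    try {
      val stmt = conn.createStatement()
      // A trivial probe query, executed by the Spark SQL engine behind the server.
      val rs = stmt.executeQuery("SELECT 1 AS probe")
      while (rs.next()) {
        println(rs.getInt("probe"))
      }
    } finally {
      conn.close()
    }
  }
}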
Example 5
Source File: ThriftServerTab.scala    From iolap   with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.ui

import org.apache.spark.sql.hive.thriftserver.{HiveThriftServer2, SparkSQLEnv}
import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab._
import org.apache.spark.ui.{SparkUI, SparkUITab}
import org.apache.spark.{SparkContext, Logging, SparkException}


private[thriftserver] class ThriftServerTab(sparkContext: SparkContext)
  extends SparkUITab(getSparkUI(sparkContext), "sql") with Logging {

  override val name = "SQL"

  val parent = getSparkUI(sparkContext)
  val listener = HiveThriftServer2.listener

  attachPage(new ThriftServerPage(this))
  attachPage(new ThriftServerSessionPage(this))
  parent.attachTab(this)

  def detach() {
    getSparkUI(sparkContext).detachTab(this)
  }
}

private[thriftserver] object ThriftServerTab {
  def getSparkUI(sparkContext: SparkContext): SparkUI = {
    sparkContext.ui.getOrElse {
      throw new SparkException("Parent SparkUI to attach this tab to not found!")
    }
  }
} 
Example 6
Source File: SparkSQLEngineApp.scala    From kyuubi   with Apache License 2.0
package org.apache.kyuubi.spark

import scala.collection.JavaConverters._

import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hive.service.ServiceException
import org.apache.hive.service.cli.CLIService
import org.apache.hive.service.cli.thrift.ThriftCLIService
import org.apache.spark.SparkConf
import org.apache.spark.sql.{RuntimeConfig, SparkSession}
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2

import org.apache.kyuubi.{KyuubiException, Logging}
import org.apache.kyuubi.config.KyuubiConf
import org.apache.kyuubi.ha.HighAvailabilityConf
import org.apache.kyuubi.ha.client.{RetryPolicies, ServiceDiscovery}

object SparkSQLEngineApp extends Logging {

  def main(args: Array[String]): Unit = {

    val conf = new SparkConf(loadDefaults = true)
      // reduce conflict probability of the ui port
      .setIfMissing("spark.ui.port", "0")
    val session = SparkSession.builder()
      .config(conf)
      .appName("Kyuubi Spark SQL Engine App")
      .getOrCreate()

    try {
      initHiveServer2Configs(session.conf)
      val server = HiveThriftServer2.startWithContext(session.sqlContext)
      var thriftCLIService: ThriftCLIService = null
      var cliService: CLIService = null
      server.getServices.asScala.foreach {
        case t: ThriftCLIService =>
          if (t.getPortNumber == 0) {
            // Workaround: some Spark versions have a concurrency issue when
            // assigning the local port, so wait briefly for it to settle.
            Thread.sleep(3000)
          }
          thriftCLIService = t
        case c: CLIService => cliService = c
        case _ =>
      }
      val port = thriftCLIService.getPortNumber
      val hostName = thriftCLIService.getServerIPAddress.getHostName
      val instance = s"$hostName:$port"
      val kyuubiConf = KyuubiConf()
      kyuubiConf.set(HighAvailabilityConf.HA_ZK_CONN_RETRY_POLICY,
        RetryPolicies.N_TIME.toString)
      // getAllWithPrefix strips the "spark.kyuubi." prefix from the returned keys,
      // so restore the "kyuubi." prefix before applying them to KyuubiConf.
      conf.getAllWithPrefix("spark.kyuubi.").foreach { case (k, v) =>
        kyuubiConf.set("kyuubi." + k, v)
      }

      val postHook = new Thread {
        override def run(): Unit = {
          while (cliService.getSessionManager.getOpenSessionCount > 0) {
            Thread.sleep(60 * 1000)
          }
          server.stop()
        }
      }
      val namespace =
        kyuubiConf.get(HighAvailabilityConf.HA_ZK_NAMESPACE) + "-" + session.sparkContext.sparkUser
      val serviceDiscovery = new ServiceDiscovery(instance, namespace, postHook)
      try {
        serviceDiscovery.initialize(kyuubiConf)
        serviceDiscovery.start()
      } catch {
        case e: KyuubiException =>
          error(e.getMessage, e.getCause)
          serviceDiscovery.stop()
          server.stop()
          session.stop()
      }
    } catch {
      case e: ServiceException =>
        error("Failed to start HiveThriftServer2 with spark context", e)
        session.stop()
        System.exit(-1)
    }
  }

  private def initHiveServer2Configs(conf: RuntimeConfig): Unit = {
    val settings = Map(
      ConfVars.HIVE_SERVER2_LOGGING_OPERATION_LOG_LOCATION -> "",
      ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY -> "false",
      ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION -> null,
      ConfVars.HIVE_SERVER2_TRANSPORT_MODE -> "binary",
      ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST -> "",
      ConfVars.HIVE_SERVER2_THRIFT_PORT -> "0",
      ConfVars.METASTORE_KERBEROS_PRINCIPAL -> "")
    // RuntimeConfig.set rejects null values, so unset those keys instead.
    settings.foreach { case (k, v) =>
      if (v == null) conf.unset(k.varname) else conf.set(k.varname, v)
    }
  }
} 
Example 7
Source File: ThriftServerTab.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.ui

import org.apache.spark.sql.hive.thriftserver.{HiveThriftServer2, SparkSQLEnv}
import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab._
import org.apache.spark.ui.{SparkUI, SparkUITab}
import org.apache.spark.{SparkContext, Logging, SparkException}


private[thriftserver] class ThriftServerTab(sparkContext: SparkContext)
  extends SparkUITab(getSparkUI(sparkContext), "sqlserver") with Logging {

  override val name = "JDBC/ODBC Server"

  val parent = getSparkUI(sparkContext)
  val listener = HiveThriftServer2.listener

  attachPage(new ThriftServerPage(this))
  attachPage(new ThriftServerSessionPage(this))
  parent.attachTab(this)

  def detach() {
    getSparkUI(sparkContext).detachTab(this)
  }
}

private[thriftserver] object ThriftServerTab {
  def getSparkUI(sparkContext: SparkContext): SparkUI = {
    sparkContext.ui.getOrElse {
      throw new SparkException("Parent SparkUI to attach this tab to not found!")
    }
  }
} 
Example 8
Source File: ThriftServerTab.scala    From Spark-2.3.1   with Apache License 2.0
The code is identical to Example 1.
Example 9
Source File: ThriftServerTab.scala    From BigDatalog   with Apache License 2.0
The code is identical to Example 7.
Example 10
Source File: ThriftServerTabSeq.scala    From bdg-sequila   with Apache License 2.0
package org.apache.spark.sql.hive.thriftserver.ui

import org.apache.spark.{SparkContext, SparkException}
import org.apache.spark.internal.Logging
import org.apache.spark.sql.hive.thriftserver.HiveThriftServer2Seq.HiveThriftServer2ListenerSeq
import org.apache.spark.sql.hive.thriftserver.{HiveThriftServer2, SequilaThriftServer}
import org.apache.spark.sql.hive.thriftserver.ui.ThriftServerTab._
import org.apache.spark.ui.{SparkUI, SparkUITab}


private[thriftserver] class ThriftServerTabSeq(sparkContext: SparkContext, list: HiveThriftServer2ListenerSeq)
  extends SparkUITab(getSparkUI(sparkContext), "sqlserver") with Logging {

  override val name = "SeQuiLa JDBC/ODBC Server"

  val parent = getSparkUI(sparkContext)
  val listener = list

  attachPage(new ThriftServerPageSeq(this))
  attachPage(new ThriftServerSessionPageSeq(this))
  parent.attachTab(this)

  def detach() {
    getSparkUI(sparkContext).detachTab(this)
  }
}

private[thriftserver] object ThriftServerTab {
  def getSparkUI(sparkContext: SparkContext): SparkUI = {
    sparkContext.ui.getOrElse {
      throw new SparkException("Parent SparkUI to attach this tab to not found!")
    }
  }
}