org.scalatest.PrivateMethodTester Scala Examples

The following examples show how to use org.scalatest.PrivateMethodTester. Each example is taken from an open-source project; the source file and the project it comes from are listed above the code.
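All of the examples share the same pattern: mix PrivateMethodTester into the suite, create a PrivateMethod handle parameterized by the private method's result type and named by a Symbol, and call it on the target object with invokePrivate. The minimal sketch below illustrates that pattern in isolation; the Calculator class, its private double method, the CalculatorSpec suite, and the ScalaTest 3.1+ style imports are assumptions made only for this illustration and do not come from the projects listed below.

import org.scalatest.PrivateMethodTester
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

// Hypothetical class with a private method, used only to illustrate the pattern.
class Calculator {
  private def double(x: Int): Int = x * 2
}

class CalculatorSpec extends AnyFlatSpec with Matchers with PrivateMethodTester {

  "PrivateMethodTester" should "invoke a private method by name" in {
    val calculator = new Calculator
    // Create a handle: result type as the type parameter, method name as a Symbol.
    val double = PrivateMethod[Int](Symbol("double"))
    // invokePrivate locates the private method reflectively and calls it with the given arguments.
    val result = calculator invokePrivate double(21)
    result shouldBe 42
  }
}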
Example 1
Source File: LogPageSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.{File, FileWriter}

import org.mockito.Mockito.{mock, when}
import org.scalatest.PrivateMethodTester

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.worker.Worker

class LogPageSuite extends SparkFunSuite with PrivateMethodTester {

  test("get logs simple") {
    val webui = mock(classOf[WorkerWebUI])
    val worker = mock(classOf[Worker])
    val tmpDir = new File(sys.props("java.io.tmpdir"))
    val workDir = new File(tmpDir, "work-dir")
    workDir.mkdir()
    when(webui.workDir).thenReturn(workDir)
    when(webui.worker).thenReturn(worker)
    when(worker.conf).thenReturn(new SparkConf())
    val logPage = new LogPage(webui)

    // Prepare some fake log files to read later
    val out = "some stdout here"
    val err = "some stderr here"
    val tmpOut = new File(workDir, "stdout")
    val tmpErr = new File(workDir, "stderr")
    val tmpErrBad = new File(tmpDir, "stderr") // outside the working directory
    val tmpOutBad = new File(tmpDir, "stdout")
    val tmpRand = new File(workDir, "random")
    write(tmpOut, out)
    write(tmpErr, err)
    write(tmpOutBad, out)
    write(tmpErrBad, err)
    write(tmpRand, "1 6 4 5 2 7 8")

    // Get the logs. All log types other than "stderr" or "stdout" will be rejected
    val getLog = PrivateMethod[(String, Long, Long, Long)]('getLog)
    val (stdout, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stdout", None, 100)
    val (stderr, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stderr", None, 100)
    val (error1, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "random", None, 100)
    val (error2, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "does-not-exist.txt", None, 100)
    // These files exist, but live outside the working directory
    val (error3, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stderr", None, 100)
    val (error4, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stdout", None, 100)
    assert(stdout === out)
    assert(stderr === err)
    assert(error1.startsWith("Error: Log type must be one of "))
    assert(error2.startsWith("Error: Log type must be one of "))
    assert(error3.startsWith("Error: invalid log directory"))
    assert(error4.startsWith("Error: invalid log directory"))
  }

  
  private def write(f: File, s: String): Unit = {
    val writer = new FileWriter(f)
    try {
      writer.write(s)
    } finally {
      writer.close()
    }
  }

} 
Example 2
Source File: CommandUtilsSuite.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.deploy.worker

import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils

class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {

  test("set libraryPath correctly") {
    val appId = "12345-worker321-9876"
    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
    val cmd = new Command("mainClass", Seq(), Map(), Seq(), Seq("libraryPathToB"), Seq())
    val builder = CommandUtils.buildProcessBuilder(
      cmd, new SecurityManager(new SparkConf), 512, sparkHome, t => t)
    val libraryPath = Utils.libraryPathEnvName
    val env = builder.environment
    env.keySet should contain(libraryPath)
    assert(env.get(libraryPath).startsWith("libraryPathToB"))
  }

  test("auth secret shouldn't appear in java opts") {
    val buildLocalCommand = PrivateMethod[Command]('buildLocalCommand)
    val conf = new SparkConf
    val secret = "This is the secret sauce"
    // set auth secret
    conf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, secret)
    val command = new Command("mainClass", Seq(), Map(), Seq(), Seq("lib"),
      Seq("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF + "=" + secret))

    // auth is not set
    var cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to false
    conf.set(SecurityManager.SPARK_AUTH_CONF, "false")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to true
    conf.set(SecurityManager.SPARK_AUTH_CONF, "true")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(cmd.environment(SecurityManager.ENV_AUTH_SECRET) === secret)
  }
} 
Example 3
Source File: LogPageSuite.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.{File, FileWriter}

import org.mockito.Mockito.{mock, when}
import org.scalatest.PrivateMethodTester

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.worker.Worker

class LogPageSuite extends SparkFunSuite with PrivateMethodTester {

  test("get logs simple") {
    val webui = mock(classOf[WorkerWebUI])
    val worker = mock(classOf[Worker])
    val tmpDir = new File(sys.props("java.io.tmpdir"))
    val workDir = new File(tmpDir, "work-dir")
    workDir.mkdir()
    when(webui.workDir).thenReturn(workDir)
    when(webui.worker).thenReturn(worker)
    when(worker.conf).thenReturn(new SparkConf())
    val logPage = new LogPage(webui)

    // Prepare some fake log files to read later
    val out = "some stdout here"
    val err = "some stderr here"
    val tmpOut = new File(workDir, "stdout")
    val tmpErr = new File(workDir, "stderr")
    val tmpErrBad = new File(tmpDir, "stderr") // outside the working directory
    val tmpOutBad = new File(tmpDir, "stdout")
    val tmpRand = new File(workDir, "random")
    write(tmpOut, out)
    write(tmpErr, err)
    write(tmpOutBad, out)
    write(tmpErrBad, err)
    write(tmpRand, "1 6 4 5 2 7 8")

    // Get the logs. All log types other than "stderr" or "stdout" will be rejected
    val getLog = PrivateMethod[(String, Long, Long, Long)]('getLog)
    val (stdout, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stdout", None, 100)
    val (stderr, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stderr", None, 100)
    val (error1, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "random", None, 100)
    val (error2, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "does-not-exist.txt", None, 100)
    // These files exist, but live outside the working directory
    val (error3, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stderr", None, 100)
    val (error4, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stdout", None, 100)
    assert(stdout === out)
    assert(stderr === err)
    assert(error1.startsWith("Error: Log type must be one of "))
    assert(error2.startsWith("Error: Log type must be one of "))
    assert(error3.startsWith("Error: invalid log directory"))
    assert(error4.startsWith("Error: invalid log directory"))
  }

  
  private def write(f: File, s: String): Unit = {
    val writer = new FileWriter(f)
    try {
      writer.write(s)
    } finally {
      writer.close()
    }
  }

} 
Example 4
Source File: LogPageSuite.scala    From iolap   with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.{File, FileWriter}

import org.mockito.Mockito.{mock, when}
import org.scalatest.PrivateMethodTester

import org.apache.spark.SparkFunSuite

class LogPageSuite extends SparkFunSuite with PrivateMethodTester {

  test("get logs simple") {
    val webui = mock(classOf[WorkerWebUI])
    val tmpDir = new File(sys.props("java.io.tmpdir"))
    val workDir = new File(tmpDir, "work-dir")
    workDir.mkdir()
    when(webui.workDir).thenReturn(workDir)
    val logPage = new LogPage(webui)

    // Prepare some fake log files to read later
    val out = "some stdout here"
    val err = "some stderr here"
    val tmpOut = new File(workDir, "stdout")
    val tmpErr = new File(workDir, "stderr")
    val tmpErrBad = new File(tmpDir, "stderr") // outside the working directory
    val tmpOutBad = new File(tmpDir, "stdout")
    val tmpRand = new File(workDir, "random")
    write(tmpOut, out)
    write(tmpErr, err)
    write(tmpOutBad, out)
    write(tmpErrBad, err)
    write(tmpRand, "1 6 4 5 2 7 8")

    // Get the logs. All log types other than "stderr" or "stdout" will be rejected
    val getLog = PrivateMethod[(String, Long, Long, Long)]('getLog)
    val (stdout, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stdout", None, 100)
    val (stderr, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stderr", None, 100)
    val (error1, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "random", None, 100)
    val (error2, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "does-not-exist.txt", None, 100)
    // These files exist, but live outside the working directory
    val (error3, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stderr", None, 100)
    val (error4, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stdout", None, 100)
    assert(stdout === out)
    assert(stderr === err)
    assert(error1.startsWith("Error: Log type must be one of "))
    assert(error2.startsWith("Error: Log type must be one of "))
    assert(error3.startsWith("Error: invalid log directory"))
    assert(error4.startsWith("Error: invalid log directory"))
  }

  
  private def write(f: File, s: String): Unit = {
    val writer = new FileWriter(f)
    try {
      writer.write(s)
    } finally {
      writer.close()
    }
  }

} 
Example 5
Source File: SparkRBackendSpec.scala    From seahorse   with Apache License 2.0
package ai.deepsense.workflowexecutor

import org.apache.spark.api.r._
import org.scalatest.concurrent.TimeLimits
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, PrivateMethodTester, WordSpec}

import ai.deepsense.workflowexecutor.customcode.CustomCodeEntryPoint

class SparkRBackendSpec
  extends WordSpec
  with MockitoSugar
  with Matchers
  with TimeLimits
  with PrivateMethodTester {

  "Spark R Backend" should {
    "return 0 for Entry Point Id" in {
      val sparkRBackend = new SparkRBackend()
      val customCodeEntryPoint = mock[CustomCodeEntryPoint]
      sparkRBackend.start(customCodeEntryPoint)
      sparkRBackend.entryPointId shouldBe "0"
      sparkRBackend.close()
    }
  }
} 
Example 6
Source File: AnomalyDetectorTest.scala    From deequ   with Apache License 2.0
package com.amazon.deequ.anomalydetection

import org.scalamock.scalatest.MockFactory
import org.scalatest.{Matchers, PrivateMethodTester, WordSpec}


class AnomalyDetectorTest extends WordSpec with Matchers with MockFactory with PrivateMethodTester {
  private val fakeAnomalyDetector = stub[AnomalyDetectionStrategy]

  val aD = AnomalyDetector(fakeAnomalyDetector)
  val data = Seq((0L, -1.0), (1L, 2.0), (2L, 3.0), (3L, 0.5)).map { case (t, v) =>
    DataPoint[Double](t, Option(v))
  }

  "Anomaly Detector" should {

    "ignore missing values" in {
      val data = Seq(DataPoint[Double](0L, Option(1.0)), DataPoint[Double](1L, Option(2.0)),
        DataPoint[Double](2L, None), DataPoint[Double](3L, Option(1.0)))

      (fakeAnomalyDetector.detect _ when(Vector(1.0, 2.0, 1.0), (0, 3)))
        .returns(Seq((1, Anomaly(Option(2.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data, (0L, 4L))

      assert(anomalyResult == DetectionResult(Seq((1L, Anomaly(Option(2.0), 1.0)))))
    }

    "only detect values in range" in {
      (fakeAnomalyDetector.detect _ when(Vector(-1.0, 2.0, 3.0, 0.5), (2, 4)))
        .returns(Seq((2, Anomaly(Option(3.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data, (2L, 4L))

      assert(anomalyResult == DetectionResult(Seq((2L, Anomaly(Option(3.0), 1.0)))))
    }

    "throw an error when intervals are not ordered" in {
      intercept[IllegalArgumentException] {
        aD.detectAnomaliesInHistory(data, (4, 2))
      }
    }

    "treat ordered values with time gaps correctly" in {
      val data = (for (i <- 1 to 10) yield {
        (i.toLong * 200L) -> 5.0
      }).map { case (t, v) =>
        DataPoint[Double](t, Option(v))
      }

      (fakeAnomalyDetector.detect _ when(data.map(_.metricValue.get).toVector, (0, 2)))
        .returns (Seq((0, Anomaly(Option(5.0), 1.0)), (1, Anomaly(Option(5.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data, (200L, 401L))

      assert(anomalyResult == DetectionResult(Seq((200L, Anomaly(Option(5.0), 1.0)),
        (400L, Anomaly(Option(5.0), 1.0)))))
    }

    "treat unordered values with time gaps correctly" in {
      val data = Seq((10L, -1.0), (25L, 2.0), (11L, 3.0), (0L, 0.5)).map { case (t, v) =>
        DataPoint[Double](t, Option(v))
      }
      val tS = AnomalyDetector(SimpleThresholdStrategy(lowerBound = -0.5, upperBound = 1.0))

      (fakeAnomalyDetector.detect _ when(Vector(0.5, -1.0, 3.0, 2.0), (0, 4)))
        .returns(Seq((1, Anomaly(Option(-1.0), 1.0)), (2, Anomaly(Option(3.0), 1.0)),
          (3, Anomaly(Option(2.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data)

      assert(anomalyResult == DetectionResult(Seq((10L, Anomaly(Option(-1.0), 1.0)),
        (11L, Anomaly(Option(3.0), 1.0)), (25L, Anomaly(Option(2.0), 1.0)))))
    }

    "treat unordered values without time gaps correctly" in {
      val data = Seq((1L, -1.0), (3L, 2.0), (2L, 3.0), (0L, 0.5)).map { case (t, v) =>
        DataPoint[Double](t, Option(v))
      }

      (fakeAnomalyDetector.detect _ when(Vector(0.5, -1.0, 3.0, 2.0), (0, 4)))
        .returns(Seq((1, Anomaly(Option(-1.0), 1.0)), (2, Anomaly(Option(3.0), 1.0)),
          (3, Anomaly(Option(2.0), 1.0))))

      val anomalyResult = aD.detectAnomaliesInHistory(data)

      assert(anomalyResult == DetectionResult(Seq((1L, Anomaly(Option(-1.0), 1.0)),
        (2L, Anomaly(Option(3.0), 1.0)), (3L, Anomaly(Option(2.0), 1.0)))))
    }

  }
} 
Example 7
Source File: CachedKafkaProducerSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.sql.kafka010

import java.{util => ju}
import java.util.concurrent.ConcurrentMap

import org.apache.kafka.clients.producer.KafkaProducer
import org.apache.kafka.common.serialization.ByteArraySerializer
import org.scalatest.PrivateMethodTester

import org.apache.spark.sql.test.SharedSQLContext

class CachedKafkaProducerSuite extends SharedSQLContext with PrivateMethodTester {

  type KP = KafkaProducer[Array[Byte], Array[Byte]]

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    val clear = PrivateMethod[Unit]('clear)
    CachedKafkaProducer.invokePrivate(clear())
  }

  test("Should return the cached instance on calling getOrCreate with same params.") {
    val kafkaParams = new ju.HashMap[String, Object]()
    kafkaParams.put("acks", "0")
    // Only the host needs to be resolvable; a running Kafka server is not required.
    kafkaParams.put("bootstrap.servers", "127.0.0.1:9022")
    kafkaParams.put("key.serializer", classOf[ByteArraySerializer].getName)
    kafkaParams.put("value.serializer", classOf[ByteArraySerializer].getName)
    val producer = CachedKafkaProducer.getOrCreate(kafkaParams)
    val producer2 = CachedKafkaProducer.getOrCreate(kafkaParams)
    assert(producer == producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedKafkaProducer.invokePrivate(cacheMap())
    assert(map.size == 1)
  }

  test("Should close the correct kafka producer for the given kafkaPrams.") {
    val kafkaParams = new ju.HashMap[String, Object]()
    kafkaParams.put("acks", "0")
    kafkaParams.put("bootstrap.servers", "127.0.0.1:9022")
    kafkaParams.put("key.serializer", classOf[ByteArraySerializer].getName)
    kafkaParams.put("value.serializer", classOf[ByteArraySerializer].getName)
    val producer: KP = CachedKafkaProducer.getOrCreate(kafkaParams)
    kafkaParams.put("acks", "1")
    val producer2: KP = CachedKafkaProducer.getOrCreate(kafkaParams)
    // With updated conf, a new producer instance should be created.
    assert(producer != producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedKafkaProducer.invokePrivate(cacheMap())
    assert(map.size == 2)

    CachedKafkaProducer.close(kafkaParams)
    val map2 = CachedKafkaProducer.invokePrivate(cacheMap())
    assert(map2.size == 1)
    import scala.collection.JavaConverters._
    val (seq: Seq[(String, Object)], _producer: KP) = map2.asScala.toArray.apply(0)
    assert(_producer == producer)
  }
} 
Example 8
Source File: RecurringTimerSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.streaming.util

import java.util.concurrent.ConcurrentLinkedQueue

import scala.collection.JavaConverters._
import scala.concurrent.duration._

import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.ManualClock

class RecurringTimerSuite extends SparkFunSuite with PrivateMethodTester {

  test("basic") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]()
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-basic")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    clock.advance(100)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L))
    }
    clock.advance(200)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L, 200L, 300L))
    }
    assert(timer.stop(interruptTimer = true) === 300L)
  }

  test("SPARK-10224: call 'callback' after stopping") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-SPARK-10224")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    @volatile var lastTime = -1L
    // Now RecurringTimer is waiting for the next interval
    val thread = new Thread {
      override def run(): Unit = {
        lastTime = timer.stop(interruptTimer = false)
      }
    }
    thread.start()
    val stopped = PrivateMethod[RecurringTimer]('stopped)
    // Make sure the `stopped` field has been changed
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(timer.invokePrivate(stopped()) === true)
    }
    clock.advance(200)
    // When RecurringTimer is awake from clock.waitTillTime, it will call `callback` once.
    // Then it will find `stopped` is true and exit the loop, but it should call `callback` again
    // before exiting its internal thread.
    thread.join()
    assert(results.asScala.toSeq === Seq(0L, 100L, 200L))
    assert(lastTime === 200L)
  }
} 
Example 9
Source File: CommandUtilsSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.deploy.worker

import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils

class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {

  test("set libraryPath correctly") {
    val appId = "12345-worker321-9876"
    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
    val cmd = new Command("mainClass", Seq(), Map(), Seq(), Seq("libraryPathToB"), Seq())
    val builder = CommandUtils.buildProcessBuilder(
      cmd, new SecurityManager(new SparkConf), 512, sparkHome, t => t)
    val libraryPath = Utils.libraryPathEnvName
    val env = builder.environment
    env.keySet should contain(libraryPath)
    assert(env.get(libraryPath).startsWith("libraryPathToB"))
  }

  test("auth secret shouldn't appear in java opts") {
    val buildLocalCommand = PrivateMethod[Command]('buildLocalCommand)
    val conf = new SparkConf
    val secret = "This is the secret sauce"
    // set auth secret
    conf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, secret)
    val command = new Command("mainClass", Seq(), Map(), Seq(), Seq("lib"),
      Seq("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF + "=" + secret))

    // auth is not set
    var cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to false
    conf.set(SecurityManager.SPARK_AUTH_CONF, "false")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to true
    conf.set(SecurityManager.SPARK_AUTH_CONF, "true")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(cmd.environment(SecurityManager.ENV_AUTH_SECRET) === secret)
  }
} 
Example 10
Source File: RecurringTimerSuite.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.streaming.util

import java.util.concurrent.ConcurrentLinkedQueue

import scala.collection.JavaConverters._
import scala.concurrent.duration._

import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.ManualClock

class RecurringTimerSuite extends SparkFunSuite with PrivateMethodTester {

  test("basic") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]()
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-basic")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    clock.advance(100)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L))
    }
    clock.advance(200)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L, 200L, 300L))
    }
    assert(timer.stop(interruptTimer = true) === 300L)
  }

  test("SPARK-10224: call 'callback' after stopping") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-SPARK-10224")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    @volatile var lastTime = -1L
    // Now RecurringTimer is waiting for the next interval
    val thread = new Thread {
      override def run(): Unit = {
        lastTime = timer.stop(interruptTimer = false)
      }
    }
    thread.start()
    val stopped = PrivateMethod[RecurringTimer]('stopped)
    // Make sure the `stopped` field has been changed
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(timer.invokePrivate(stopped()) === true)
    }
    clock.advance(200)
    // When RecurringTimer is awake from clock.waitTillTime, it will call `callback` once.
    // Then it will find `stopped` is true and exit the loop, but it should call `callback` again
    // before exiting its internal thread.
    thread.join()
    assert(results.asScala.toSeq === Seq(0L, 100L, 200L))
    assert(lastTime === 200L)
  }
} 
Example 11
Source File: RecurringTimerSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.streaming.util

import scala.collection.mutable
import scala.concurrent.duration._

import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.ManualClock

class RecurringTimerSuite extends SparkFunSuite with PrivateMethodTester {

  test("basic") {
    val clock = new ManualClock()
    val results = new mutable.ArrayBuffer[Long]() with mutable.SynchronizedBuffer[Long]
    val timer = new RecurringTimer(clock, 100, time => {
      results += time
    }, "RecurringTimerSuite-basic")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results === Seq(0L))
    }
    clock.advance(100)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results === Seq(0L, 100L))
    }
    clock.advance(200)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results === Seq(0L, 100L, 200L, 300L))
    }
    assert(timer.stop(interruptTimer = true) === 300L)
  }

  test("SPARK-10224: call 'callback' after stopping") {
    val clock = new ManualClock()
    val results = new mutable.ArrayBuffer[Long]() with mutable.SynchronizedBuffer[Long]
    val timer = new RecurringTimer(clock, 100, time => {
      results += time
    }, "RecurringTimerSuite-SPARK-10224")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results === Seq(0L))
    }
    @volatile var lastTime = -1L
    // Now RecurringTimer is waiting for the next interval
    val thread = new Thread {
      override def run(): Unit = {
        lastTime = timer.stop(interruptTimer = false)
      }
    }
    thread.start()
    val stopped = PrivateMethod[RecurringTimer]('stopped)
    // Make sure the `stopped` field has been changed
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(timer.invokePrivate(stopped()) === true)
    }
    clock.advance(200)
    // When RecurringTimer is awake from clock.waitTillTime, it will call `callback` once.
    // Then it will find `stopped` is true and exit the loop, but it should call `callback` again
    // before exiting its internal thread.
    thread.join()
    assert(results === Seq(0L, 100L, 200L))
    assert(lastTime === 200L)
  }
} 
Example 12
Source File: CommandUtilsSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.deploy.worker

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils
import org.scalatest.{Matchers, PrivateMethodTester}

class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {

  test("set libraryPath correctly") {
    val appId = "12345-worker321-9876"
    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
    val cmd = new Command("mainClass", Seq(), Map(), Seq(), Seq("libraryPathToB"), Seq())
    val builder = CommandUtils.buildProcessBuilder(
      cmd, new SecurityManager(new SparkConf), 512, sparkHome, t => t)
    val libraryPath = Utils.libraryPathEnvName
    val env = builder.environment
    env.keySet should contain(libraryPath)
    assert(env.get(libraryPath).startsWith("libraryPathToB"))
  }

  test("auth secret shouldn't appear in java opts") {
    val buildLocalCommand = PrivateMethod[Command]('buildLocalCommand)
    val conf = new SparkConf
    val secret = "This is the secret sauce"
    // set auth secret
    conf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, secret)
    val command = new Command("mainClass", Seq(), Map(), Seq(), Seq("lib"),
      Seq("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF + "=" + secret))

    // auth is not set
    var cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to false
    conf.set(SecurityManager.SPARK_AUTH_CONF, "false")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to true
    conf.set(SecurityManager.SPARK_AUTH_CONF, "true")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(cmd.environment(SecurityManager.ENV_AUTH_SECRET) === secret)
  }
} 
Example 13
Source File: LogPageSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.{File, FileWriter}

import org.mockito.Mockito.{mock, when}
import org.scalatest.PrivateMethodTester

import org.apache.spark.SparkFunSuite

class LogPageSuite extends SparkFunSuite with PrivateMethodTester {

  test("get logs simple") {
    val webui = mock(classOf[WorkerWebUI])
    val tmpDir = new File(sys.props("java.io.tmpdir"))
    val workDir = new File(tmpDir, "work-dir")
    workDir.mkdir()
    when(webui.workDir).thenReturn(workDir)
    val logPage = new LogPage(webui)

    // Prepare some fake log files to read later
    val out = "some stdout here"
    val err = "some stderr here"
    val tmpOut = new File(workDir, "stdout")
    val tmpErr = new File(workDir, "stderr")
    val tmpErrBad = new File(tmpDir, "stderr") // outside the working directory
    val tmpOutBad = new File(tmpDir, "stdout")
    val tmpRand = new File(workDir, "random")
    write(tmpOut, out)
    write(tmpErr, err)
    write(tmpOutBad, out)
    write(tmpErrBad, err)
    write(tmpRand, "1 6 4 5 2 7 8")

    // Get the logs. All log types other than "stderr" or "stdout" will be rejected
    val getLog = PrivateMethod[(String, Long, Long, Long)]('getLog)
    val (stdout, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stdout", None, 100)
    val (stderr, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stderr", None, 100)
    val (error1, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "random", None, 100)
    val (error2, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "does-not-exist.txt", None, 100)
    // These files exist, but live outside the working directory
    val (error3, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stderr", None, 100)
    val (error4, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stdout", None, 100)
    assert(stdout === out)
    assert(stderr === err)
    assert(error1.startsWith("Error: Log type must be one of "))
    assert(error2.startsWith("Error: Log type must be one of "))
    assert(error3.startsWith("Error: invalid log directory"))
    assert(error4.startsWith("Error: invalid log directory"))
  }

  
  private def write(f: File, s: String): Unit = {
    val writer = new FileWriter(f)
    try {
      writer.write(s)
    } finally {
      writer.close()
    }
  }

} 
Example 14
Source File: PrometheusModuleSpec.scala    From play-prometheus-filters   with MIT License
package com.github.stijndehaes.playprometheusfilters

import io.prometheus.client.{Collector, CollectorRegistry}
import org.scalatest.{BeforeAndAfter, MustMatchers, PrivateMethodTester, WordSpec}
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import play.api.inject.guice.GuiceApplicationBuilder

class PrometheusModuleSpec extends WordSpec with MustMatchers with BeforeAndAfter with PrivateMethodTester with GuiceOneAppPerTest {

  before {
    // clearing registry before each test
    CollectorRegistry.defaultRegistry.clear()
  }

  "PrometheusModule" should {
    "register default exporters when enabled" in {
      // default enabled
      val app = new GuiceApplicationBuilder()
        .configure(PrometheusModule.defaultExportsKey -> true)
        .build()

      val collector = app.injector.instanceOf[CollectorRegistry]
      val collectors = PrivateMethod[java.util.HashSet[Collector]]('collectors)
      (collector invokePrivate collectors()).size must be > 0
    }

    "not register default exporters when disabled" in {
      // disable default exporters
      val app = new GuiceApplicationBuilder()
        .configure(PrometheusModule.defaultExportsKey -> false)
        .build()

      val collector = app.injector.instanceOf[CollectorRegistry]
      val collectors = PrivateMethod[java.util.HashSet[Collector]]('collectors)
      (collector invokePrivate collectors()).size must be (0)
    }
  }

  
  // Helper (name assumed) that exposes the names of all exporters registered in a CollectorRegistry.
  implicit class RichCollectorRegistry(registry: CollectorRegistry) {

    def getExporterNames: Seq[String] = {
      val exportNames = collection.mutable.Buffer.empty[String]
      val mfs = registry.metricFamilySamples()
      while(mfs.hasMoreElements) {
        exportNames += mfs.nextElement().name
      }
      exportNames
    }
  }
} 
Example 15
Source File: CachedPulsarClientSuite.scala    From pulsar-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.pulsar

import java.util.concurrent.ConcurrentMap
import java.{util => ju}

import org.scalatest.PrivateMethodTester

import org.apache.pulsar.client.api.PulsarClient
import org.apache.spark.sql.test.SharedSQLContext

class CachedPulsarClientSuite extends SharedSQLContext with PrivateMethodTester with PulsarTest {

  import PulsarOptions._

  type KP = PulsarClient

  protected override def beforeEach(): Unit = {
    super.beforeEach()
    CachedPulsarClient.clear()
  }

  test("Should return the cached instance on calling getOrCreate with same params.") {
    val pulsarParams = new ju.HashMap[String, Object]()
    // Only the host needs to be resolvable; a running Pulsar server is not required.
    pulsarParams.put(SERVICE_URL_OPTION_KEY, "pulsar://127.0.0.1:6650")
    pulsarParams.put("concurrentLookupRequest", "10000")
    val producer = CachedPulsarClient.getOrCreate(pulsarParams)
    val producer2 = CachedPulsarClient.getOrCreate(pulsarParams)
    assert(producer == producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedPulsarClient.invokePrivate(cacheMap())
    assert(map.size == 1)
  }

  test("Should close the correct pulsar producer for the given pulsarPrams.") {
    val pulsarParams = new ju.HashMap[String, Object]()
    pulsarParams.put(SERVICE_URL_OPTION_KEY, "pulsar://127.0.0.1:6650")
    pulsarParams.put("concurrentLookupRequest", "10000")
    val producer: KP = CachedPulsarClient.getOrCreate(pulsarParams)
    pulsarParams.put("concurrentLookupRequest", "20000")
    val producer2: KP = CachedPulsarClient.getOrCreate(pulsarParams)
    // With updated conf, a new producer instance should be created.
    assert(producer != producer2)

    val cacheMap = PrivateMethod[ConcurrentMap[Seq[(String, Object)], KP]]('getAsMap)
    val map = CachedPulsarClient.invokePrivate(cacheMap())
    assert(map.size == 2)

    CachedPulsarClient.close(pulsarParams)
    val map2 = CachedPulsarClient.invokePrivate(cacheMap())
    assert(map2.size == 1)
    import scala.collection.JavaConverters._
    val (seq: Seq[(String, Object)], _producer: KP) = map2.asScala.toArray.apply(0)
    assert(_producer == producer)
  }
} 
Example 16
Source File: MicroBlockMinerSpec.scala    From Waves   with MIT License
package com.wavesplatform.mining

import com.wavesplatform.account.Alias
import com.wavesplatform.block.Block
import com.wavesplatform.common.utils._
import com.wavesplatform.db.WithDomain
import com.wavesplatform.features.BlockchainFeatures
import com.wavesplatform.lagonaki.mocks.TestBlock
import com.wavesplatform.mining.microblocks.MicroBlockMinerImpl
import com.wavesplatform.mining.microblocks.MicroBlockMinerImpl.MicroBlockMiningResult
import com.wavesplatform.settings.TestFunctionalitySettings
import com.wavesplatform.transaction.{CreateAliasTransaction, GenesisTransaction, TxVersion}
import com.wavesplatform.utils.Schedulers
import com.wavesplatform.utx.UtxPoolImpl
import com.wavesplatform.{TestValues, TransactionGen}
import monix.eval.Task
import monix.execution.Scheduler
import org.scalamock.scalatest.PathMockFactory
import org.scalatest.{FlatSpec, Matchers, PrivateMethodTester}

import scala.concurrent.duration._
import scala.util.Random

class MicroBlockMinerSpec extends FlatSpec with Matchers with PrivateMethodTester with PathMockFactory with WithDomain with TransactionGen {
  "Micro block miner" should "generate microblocks in flat interval" in {
    val scheduler = Schedulers.singleThread("test")
    val acc       = TestValues.keyPair
    val genesis   = GenesisTransaction.create(acc.toAddress, TestValues.bigMoney, TestValues.timestamp).explicitGet()
    val settings  = domainSettingsWithFS(TestFunctionalitySettings.withFeatures(BlockchainFeatures.NG))
    withDomain(settings) { d =>
      d.appendBlock(TestBlock.create(Seq(genesis)))
      val utxPool = new UtxPoolImpl(ntpTime, d.blockchainUpdater, ignoreSpendableBalanceChanged, settings.utxSettings, enablePriorityPool = true)
      val microBlockMiner = new MicroBlockMinerImpl(
        _ => (),
        null,
        d.blockchainUpdater,
        utxPool,
        settings.minerSettings,
        scheduler,
        scheduler
      )
      val generateOneMicroBlockTask = PrivateMethod[Task[MicroBlockMiningResult]](Symbol("generateOneMicroBlockTask"))

      def generateBlocks(
          block: Block,
          constraint: MiningConstraint,
          lastMicroBlock: Long
      ): Block = {
        val task = microBlockMiner invokePrivate generateOneMicroBlockTask(
          acc,
          block,
          MiningConstraints(d.blockchainUpdater, d.blockchainUpdater.height, Some(settings.minerSettings)),
          constraint,
          lastMicroBlock
        )
        import Scheduler.Implicits.global
        val startTime = System.nanoTime()
        val tx = CreateAliasTransaction
          .selfSigned(TxVersion.V1, acc, Alias.create("test" + Random.nextInt()).explicitGet(), TestValues.fee, TestValues.timestamp)
          .explicitGet()
        utxPool.putIfNew(tx).resultE.explicitGet()
        val result = task.runSyncUnsafe()
        result match {
          case res @ MicroBlockMinerImpl.Success(b, totalConstraint) =>
            val isFirstBlock = block.transactionData.isEmpty
            val elapsed = (res.nanoTime - startTime).nanos.toMillis

            if (isFirstBlock) elapsed should be < 1000L
            else elapsed shouldBe settings.minerSettings.microBlockInterval.toMillis +- 1000

            generateBlocks(b, totalConstraint, res.nanoTime)
          case MicroBlockMinerImpl.Stop =>
            d.blockchainUpdater.liquidBlock(d.blockchainUpdater.lastBlockId.get).get
          case MicroBlockMinerImpl.Retry =>
            throw new IllegalStateException()
        }
      }

      val baseBlock = Block
        .buildAndSign(
          3,
          TestValues.timestamp,
          d.lastBlockId,
          d.lastBlock.header.baseTarget,
          d.lastBlock.header.generationSignature,
          Nil,
          acc,
          Nil,
          0
        )
        .explicitGet()

      d.appendBlock(baseBlock)

      val constraint = OneDimensionalMiningConstraint(5, TxEstimators.one, "limit")
      val lastBlock = generateBlocks(baseBlock, constraint, 0)
      lastBlock.transactionData should have size constraint.rest.toInt
    }
  }
} 
Example 17
Source File: ScalastyleRunnerSpec.scala    From sonar-scala   with GNU Lesser General Public License v3.0
package com.ncredinburgh.sonar.scalastyle

import java.io.File
import java.nio.charset.StandardCharsets

import org.mockito.Mockito._
import org.scalastyle._
import org.scalastyle.StyleError
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FlatSpec, Matchers, PrivateMethodTester}
import org.sonar.api.profiles.RulesProfile
import org.sonar.api.rules.{Rule, RulePriority}

import scala.collection.JavaConversions._


class ScalastyleRunnerSpec extends FlatSpec with Matchers with MockitoSugar with PrivateMethodTester {

  trait Fixture {
    val checker1 = ConfigurationChecker("org.scalastyle.scalariform.MultipleStringLiteralsChecker", ErrorLevel, true, Map(), None, None)
    val checker2 = ConfigurationChecker("org.scalastyle.file.HeaderMatchesChecker", ErrorLevel, true, Map("header" -> "// Expected Header Comment"), None, None)
    val configuration = ScalastyleConfiguration("sonar", true, List(checker1, checker2))
    val testeeSpy = spy(new ScalastyleRunner(mock[RulesProfile]))
    doReturn(configuration).when(testeeSpy).config
    val charset = StandardCharsets.UTF_8.name
  }


  "a scalastyle runner" should "report StyleError messages if there are rule violations" in new Fixture {
    val files = List(new File("src/test/resources/ScalaFile1.scala"))

    val messages = testeeSpy.run(charset, files).map(_.toString)

    messages should contain ("StyleError key=header.matches args=List() lineNumber=Some(1) column=None customMessage=None")

  }

  it should "not report StyleError messages if there are no violations" in new Fixture {
    val files = List(new File("src/test/resources/ScalaFile2.scala"))

    val messages = testeeSpy.run(charset, files)

    messages.length shouldEqual 0
  }

  it should "scan multiple files" in new Fixture {
    val files = List(new File("src/test/resources/ScalaFile1.scala"), new File("src/test/resources/ScalaFile2.scala"))

    val messages = testeeSpy.run(charset, files)

    messages.length shouldEqual 1
  }

  it should "convert rules to checker" in {
    val ruleToChecker = PrivateMethod[ConfigurationChecker]('ruleToChecker)
    val profile = RulesProfile.create(Constants.ProfileName, Constants.ScalaKey)
    val testee = new ScalastyleRunner(profile)
    val key = "multiple.string.literals"
    val className = "org.scalastyle.scalariform.MultipleStringLiteralsChecker"
    val rule = Rule.create
    rule.setRepositoryKey(Constants.RepositoryKey)
      .setKey(className)
      .setName(ScalastyleResources.label(key))
      .setDescription(ScalastyleResources.description(key))
      .setConfigKey(key)
      .setSeverity(RulePriority.MAJOR)
    rule.createParameter
      .setKey("allowed")
      .setDescription("")
      .setType("integer")
      .setDefaultValue("1")
    rule.createParameter
      .setKey("ignoreRegex")
      .setDescription("")
      .setType("integer")
      .setDefaultValue("^&quot;&quot;$")

    // add synthetic parameter as reference to the class
    rule.createParameter
      .setKey(Constants.ClazzParam)
      .setDescription("Scalastyle checker that validates the rule.")
      .setType("string")
      .setDefaultValue("org.scalastyle.scalariform.MultipleStringLiteralsChecker")

    val activeRule = profile.activateRule(rule, rule.getSeverity)
    activeRule.setParameter("allowed", "1")
    activeRule.setParameter("ignoreRegex", "^&quot;&quot;$")
    activeRule.setParameter(Constants.ClazzParam, "org.scalastyle.scalariform.MultipleStringLiteralsChecker")

    val checker = testee invokePrivate ruleToChecker(activeRule)
    val expectedParameters = Map("allowed" -> "1", "ignoreRegex" -> "^&quot;&quot;$", Constants.ClazzParam -> "org.scalastyle.scalariform.MultipleStringLiteralsChecker")
    val expectedChecker = ConfigurationChecker(className, ErrorLevel, true, expectedParameters, None, Some(className))

    checker shouldEqual expectedChecker
  }
} 
Example 18
Source File: SparkRBackendSpec.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.workflowexecutor

import org.apache.spark.api.r._
import org.scalatest.concurrent.TimeLimits
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{Matchers, PrivateMethodTester, WordSpec}

import io.deepsense.workflowexecutor.customcode.CustomCodeEntryPoint

class SparkRBackendSpec
  extends WordSpec
  with MockitoSugar
  with Matchers
  with TimeLimits
  with PrivateMethodTester {

  "Spark R Backend" should {
    "return 0 for Entry Point Id" in {
      val sparkRBackend = new SparkRBackend()
      val customCodeEntryPoint = mock[CustomCodeEntryPoint]
      sparkRBackend.start(customCodeEntryPoint)
      sparkRBackend.entryPointId shouldBe "0"
      sparkRBackend.close()
    }
  }
} 
Example 19
Source File: DependencyAnalyzerTest.scala    From schedoscope   with Apache License 2.0
package org.schedoscope.lineage

import org.scalatest.prop.TableDrivenPropertyChecks
import org.scalatest.{FlatSpec, Matchers, PrivateMethodTester}
import org.schedoscope.dsl.Parameter.p
import org.schedoscope.dsl.views.DateParameterizationUtils.today
import org.schedoscope.lineage.DependencyAnalyzer.{analyzeDependencies, analyzeLineage}
import test.views.{ClickOfEC01, ProductBrand}


class DependencyAnalyzerTest extends FlatSpec with Matchers with PrivateMethodTester with TableDrivenPropertyChecks {
  private val preprocessSql = PrivateMethod[String]('preprocessSql)

  "The dependency analyzer" should "analyze lineage for ProductBrand correctly" in {
    val v = ProductBrand(p("EC0101"), today._1, today._2, today._3)

    analyzeLineage(v).get shouldEqual Map(
      v.occurredAt → Set(v.product().occurredAt),
      v.productId → Set(v.product().id),
      v.brandName → Set(v.brand().name),
      v.createdAt → Set(),
      v.createdBy → Set()
    )
  }

  it should "analyze dependencies for ProductBrand correctly" in {
    val v = ProductBrand(p("EC0201"), today._1, today._2, today._3)

    analyzeDependencies(v).get shouldEqual Map(
      v.occurredAt → Set(v.product().occurredAt, v.product().year, v.product().month, v.product().day,
        v.product().brandId, v.brand().id),
      v.productId → Set(v.product().id, v.product().year, v.product().month, v.product().day, v.product().brandId,
        v.brand().id),
      v.brandName → Set(v.brand().name, v.product().year, v.product().month, v.product().day, v.product().brandId,
        v.brand().id),
      v.createdAt → Set(),
      v.createdBy → Set()
    )
  }

  it should "analyze lineage for ClickOfEC0101 correctly" in {
    val v = ClickOfEC01(today._1, today._2, today._3)

    analyzeLineage(v).get shouldEqual Map(
      v.id → Set(v.click().id),
      v.url → Set(v.click().url)
    )
  }

  it should "analyze dependencies for ClickOfEC0101 correctly" in {
    val v = ClickOfEC01(today._1, today._2, today._3)

    analyzeDependencies(v).get shouldEqual Map(
      v.id → Set(v.click().id, v.click().shopCode),
      v.url → Set(v.click().url, v.click().shopCode)
    )
  }

  it should "pre-process SQL by emptying double quoted strings" in {
    val badSql = """regexp_replace(csv_line_to_array.field_array[0] , "\"", "")"""
    val goodSql = """regexp_replace(csv_line_to_array.field_array[0] , '', '')"""

    DependencyAnalyzer invokePrivate preprocessSql(badSql) shouldEqual goodSql
  }

  it should "pre-process SQL by emptying double quoted strings with single quotes in them" in {
    val badSql = """"[\\s,\;.:\\u00bb\\u00ab\"'\\u0060\\u00b4|<>\\-_!\\u00a7a%&/()=?{\\[\\]}\\\\]""""
    val goodSql = "''"

    DependencyAnalyzer invokePrivate preprocessSql(badSql) shouldEqual goodSql
  }

  it should "pre-process SQL by emptying strings with escaped single quotes" in {
    val badSql = """unix_timestamp(last_value(wl.occurred_at) OVER session, 'yyyy-MM-dd\'T\'HH:mm:ss.SSSXXX')"""
    val goodSql = "unix_timestamp(last_value(wl.occurred_at) OVER session, '')"

    DependencyAnalyzer invokePrivate preprocessSql(badSql) shouldEqual goodSql
  }
} 
Example 20
Source File: RecurringTimerSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.streaming.util

import java.util.concurrent.ConcurrentLinkedQueue

import scala.collection.JavaConverters._
import scala.concurrent.duration._

import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.ManualClock

class RecurringTimerSuite extends SparkFunSuite with PrivateMethodTester {

  test("basic") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]()
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-basic")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    clock.advance(100)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L))
    }
    clock.advance(200)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L, 200L, 300L))
    }
    assert(timer.stop(interruptTimer = true) === 300L)
  }

  test("SPARK-10224: call 'callback' after stopping") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-SPARK-10224")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    @volatile var lastTime = -1L
    // Now RecurringTimer is waiting for the next interval
    val thread = new Thread {
      override def run(): Unit = {
        lastTime = timer.stop(interruptTimer = false)
      }
    }
    thread.start()
    val stopped = PrivateMethod[RecurringTimer]('stopped)
    // Make sure the `stopped` field has been changed
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(timer.invokePrivate(stopped()) === true)
    }
    clock.advance(200)
    // When RecurringTimer is awake from clock.waitTillTime, it will call `callback` once.
    // Then it will find `stopped` is true and exit the loop, but it should call `callback` again
    // before exiting its internal thread.
    thread.join()
    assert(results.asScala.toSeq === Seq(0L, 100L, 200L))
    assert(lastTime === 200L)
  }
} 
Example 21
Source File: CommandUtilsSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.worker

import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils

class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {

  test("set libraryPath correctly") {
    val appId = "12345-worker321-9876"
    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
    val cmd = new Command("mainClass", Seq(), Map(), Seq(), Seq("libraryPathToB"), Seq())
    val builder = CommandUtils.buildProcessBuilder(
      cmd, new SecurityManager(new SparkConf), 512, sparkHome, t => t)
    val libraryPath = Utils.libraryPathEnvName
    val env = builder.environment
    env.keySet should contain(libraryPath)
    assert(env.get(libraryPath).startsWith("libraryPathToB"))
  }

  test("auth secret shouldn't appear in java opts") {
    val buildLocalCommand = PrivateMethod[Command]('buildLocalCommand)
    val conf = new SparkConf
    val secret = "This is the secret sauce"
    // set auth secret
    conf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, secret)
    val command = new Command("mainClass", Seq(), Map(), Seq(), Seq("lib"),
      Seq("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF + "=" + secret))

    // auth is not set
    var cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to false
    conf.set(SecurityManager.SPARK_AUTH_CONF, "false")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to true
    conf.set(SecurityManager.SPARK_AUTH_CONF, "true")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(cmd.environment(SecurityManager.ENV_AUTH_SECRET) === secret)
  }
} 
Example 22
Source File: LogPageSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.worker.ui

import java.io.{File, FileWriter}

import org.mockito.Mockito.{mock, when}
import org.scalatest.PrivateMethodTester

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.worker.Worker

class LogPageSuite extends SparkFunSuite with PrivateMethodTester {

  test("get logs simple") {
    val webui = mock(classOf[WorkerWebUI])
    val worker = mock(classOf[Worker])
    val tmpDir = new File(sys.props("java.io.tmpdir"))
    val workDir = new File(tmpDir, "work-dir")
    workDir.mkdir()
    when(webui.workDir).thenReturn(workDir)
    when(webui.worker).thenReturn(worker)
    when(worker.conf).thenReturn(new SparkConf())
    val logPage = new LogPage(webui)

    // Prepare some fake log files to read later
    val out = "some stdout here"
    val err = "some stderr here"
    val tmpOut = new File(workDir, "stdout")
    val tmpErr = new File(workDir, "stderr")
    val tmpErrBad = new File(tmpDir, "stderr") // outside the working directory
    val tmpOutBad = new File(tmpDir, "stdout")
    val tmpRand = new File(workDir, "random")
    write(tmpOut, out)
    write(tmpErr, err)
    write(tmpOutBad, out)
    write(tmpErrBad, err)
    write(tmpRand, "1 6 4 5 2 7 8")

    // Get the logs. All log types other than "stderr" or "stdout" will be rejected
    val getLog = PrivateMethod[(String, Long, Long, Long)]('getLog)
    val (stdout, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stdout", None, 100)
    val (stderr, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stderr", None, 100)
    val (error1, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "random", None, 100)
    val (error2, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "does-not-exist.txt", None, 100)
    // These files exist, but live outside the working directory
    val (error3, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stderr", None, 100)
    val (error4, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stdout", None, 100)
    assert(stdout === out)
    assert(stderr === err)
    assert(error1.startsWith("Error: Log type must be one of "))
    assert(error2.startsWith("Error: Log type must be one of "))
    assert(error3.startsWith("Error: invalid log directory"))
    assert(error4.startsWith("Error: invalid log directory"))
  }

  
  private def write(f: File, s: String): Unit = {
    val writer = new FileWriter(f)
    try {
      writer.write(s)
    } finally {
      writer.close()
    }
  }

} 
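
The getLog helper above returns a 4-tuple, which is reflected in the PrivateMethod type parameter. A hedged sketch of the same idea against a made-up WindowReader class:

import org.scalatest.PrivateMethodTester
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical reader whose private helper returns several values at once.
class WindowReader {
  private def readWindow(text: String, offset: Int, length: Int): (String, Int, Int) = {
    val end = math.min(text.length, offset + length)
    (text.substring(offset, end), offset, end)
  }
}

class WindowReaderSuite extends AnyFunSuite with PrivateMethodTester {
  test("tuple results can be destructured directly") {
    // The type parameter mirrors the private method's tuple result.
    val readWindow = PrivateMethod[(String, Int, Int)]('readWindow)
    val reader = new WindowReader
    val (chunk, start, end) = reader invokePrivate readWindow("some stdout here", 5, 6)
    assert(chunk === "stdout")
    assert(start === 5)
    assert(end === 11)
  }
}
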
Example 23
Source File: PixelImageAccessModes.scala    From scalismo-faces   with Apache License 2.0 5 votes vote down vote up
package scalismo.faces.image

import scalismo.faces.FacesTestSuite
import scalismo.faces.color._
import scalismo.faces.image.AccessMode._
import scalismo.faces.image.PixelImage.implicits._
import org.scalatest.PrivateMethodTester
import scalismo.color.RGB

class PixelImageAccessModes extends FacesTestSuite with PrivateMethodTester {

  def imageDiff(i1: PixelImage[RGB], i2: PixelImage[RGB]): Double = (i1 - i2).norm

  describe("A PixelImage with access modes") {
    val image = PixelImage(10, 10, (x, y) => randomRGB)

    it("supports strict access mode") {
      val direct = image.withAccessMode(Strict())
      direct(0, 2) shouldBe image.valueAt(0, 2)
      direct(3, 7) shouldBe image.valueAt(3, 7)
      intercept[Exception](direct(-1, 1))
      intercept[Exception](direct(1, -1))
      intercept[Exception](direct(10, 1))
      intercept[Exception](direct(1, 10))
    }

    it("supports repeated access") {
      val repeat = image.withAccessMode(Repeat())
      repeat(3, 7) shouldBe image.valueAt(3, 7)
      repeat(3, 11) shouldBe image(3, 9)
      repeat(-2, 1) shouldBe image(0, 1)
      repeat(-200, 1) shouldBe image(0, 1)
      repeat(-2, 1) shouldBe image(0, 1)
    }

    it("supports mirrored access") {
      val mirror = image.withAccessMode(Mirror())
      mirror(3, 7) shouldBe image.valueAt(3, 7)
      mirror(3, 10) shouldBe image(3, 9)
      mirror(-2, 1) shouldBe image(1, 1)
      mirror(-200, 1) shouldBe image(0, 1)
      mirror(7, 10) shouldBe image(7, 9)
      mirror(7, -2) shouldBe image(7, 1)
    }

    it("supports periodic access") {
      val periodic = image.withAccessMode(Periodic())
      periodic(3, 7) shouldBe image.valueAt(3, 7)
      periodic(3, 10) shouldBe image(3, 0)
      periodic(-2, 1) shouldBe image(8, 1)
      periodic(9, -3) shouldBe image(9, 7)
      periodic(-200, 1) shouldBe image(0, 1)
    }

    it("supports padded access") {
      val padded = image.withAccessMode(Padded(RGB.White))
      padded(3, 7) shouldBe image.valueAt(3, 7)
      padded(3, 10) shouldBe RGB.White
      padded(-2, 1) shouldBe RGB.White
      padded(-200, 1) shouldBe RGB.White
    }

    it("buffering keeps access mode active") {
      val buffered = image.withAccessMode(Padded(RGB.White))
      buffered(-2, 1) shouldBe RGB.White
    }

    it("buffering with an IndexedSeq keep access mode active") {
      val buffered = image.withAccessMode(Padded(RGB.White)).buffer
      buffered(-2, 1) shouldBe RGB.White
    }
  }
} 
Example 24
Source File: CorrelationFilterTest.scala    From scalismo-faces   with Apache License 2.0 5 votes vote down vote up
package scalismo.faces.image

import scalismo.faces.FacesTestSuite
import scalismo.faces.image.filter.{Gradient, SeparableCorrelationFilter}
import org.scalatest.PrivateMethodTester

import scala.collection.IndexedSeq

class CorrelationFilterTest extends FacesTestSuite with PrivateMethodTester {

  describe("A PixelImage filtered with kernels") {

    it("is consistent when filtered with SobelX") {
      val testImg = PixelImage(ColumnMajorImageDomain(3, 3), Array(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)).withAccessMode(AccessMode.Repeat())
      val Sx = Gradient.sobelX[Double]
      val filtered: PixelImage[Double] = Sx.filter(testImg.withAccessMode(AccessMode.Repeat()))
      filtered.values.toIndexedSeq should be(IndexedSeq(12.0, 12.0, 12.0, 24.0, 24.0, 24.0, 12.0, 12.0, 12.0))
    }

    it("is consistent when filtered with SobelY") {
      val testImg = PixelImage(ColumnMajorImageDomain(3, 3), Array(1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0)).withAccessMode(AccessMode.Repeat())
      val Sx = Gradient.sobelY[Double]
      val filtered: PixelImage[Double] = Sx.filter(testImg.withAccessMode(AccessMode.Repeat()))
      filtered.transposed.values.toIndexedSeq should be(IndexedSeq(4.0, 4.0, 4.0, 8.0, 8.0, 8.0, 4.0, 4.0, 4.0))
    }

    it("yields a symmetric image (I==I.t) for a separable kernel (regression test for access modes in separable filters)") {
      val checkers = PixelImage(15, 15, (x,y) => if((x+y)%2==0) 1.0 else 0.0 )
      val blurKernel = PixelImage(PixelImageDomain(3, 1), IndexedSeq(0.25, 0.5, 0.25))
      val blurred = checkers.filter(SeparableCorrelationFilter(blurKernel, blurKernel.transposed))
      val diff = blurred.transposed.zip(blurred).map{case(b,c) => math.pow(b - c, 2)}
      diff.values.sum should be < 1e-10
    }

  }
} 
Example 25
Source File: KafkaProducerCacheSpec.scala    From spark-kafka-writer   with Apache License 2.0 5 votes vote down vote up
package com.github.benfradet.spark.kafka.writer

import com.google.common.cache.Cache
import org.apache.kafka.clients.producer._
import org.apache.kafka.common.serialization.StringSerializer
import org.scalatest.PrivateMethodTester

import scala.concurrent.duration._

class KafkaProducerCacheSpec extends SKRSpec with PrivateMethodTester {
  val cache = PrivateMethod[Cache[Seq[(String, Object)], KafkaProducer[_, _]]]('cache)
  val m1 = Map(
    "bootstrap.servers" -> "127.0.0.1:9092",
    "key.serializer" -> classOf[StringSerializer].getName,
    "value.serializer" -> classOf[StringSerializer].getName
  )
  val m2 = m1 + ("acks" -> "0")

  override def beforeAll(): Unit = {
    super.beforeAll()
    KafkaProducerCache.invokePrivate(cache()).invalidateAll()
  }

  "A KafkaProducerCache" when {
    "calling getProducer" should {
      "create the producer if it doesn't exist and retrieve it if it exists" in {
        cacheSize shouldBe 0
        val p1 = KafkaProducerCache.getProducer[String, String](m1)
        cacheSize shouldBe 1
        val p2 = KafkaProducerCache.getProducer[String, String](m1)
        p1 shouldBe p2
        cacheSize shouldBe 1
      }
    }

    "closing a producer" should {
      "close the correct producer" in {
        cacheSize shouldBe 1
        val p1 = KafkaProducerCache.getProducer[String, String](m1)
        cacheSize shouldBe 1
        val p2 = KafkaProducerCache.getProducer[String, String](m2)
        cacheSize shouldBe 2
        p1 should not be p2
        KafkaProducerCache.close(m1)
        cacheSize shouldBe 1
      }
    }
  }

  private def cacheSize: Int = KafkaProducerCache.invokePrivate(cache()).asMap.size
} 
Example 26
Source File: LineageHelperSpec.scala    From rokku   with Apache License 2.0 5 votes vote down vote up
package com.ing.wbaa.rokku.proxy.provider

import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.headers.RawHeader
import com.ing.wbaa.rokku.proxy.config.KafkaSettings
import com.ing.wbaa.rokku.proxy.data.{ BucketClassification, DirClassification, ObjectClassification, RequestId }
import com.ing.wbaa.rokku.proxy.provider.atlas.LineageHelpers
import org.scalatest.PrivateMethodTester
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

import scala.concurrent.ExecutionContext

class LineageHelperSpec extends AnyWordSpec with Diagrams with PrivateMethodTester {

  object LineageHelpersTest extends LineageHelpers {
    override protected[this] implicit val kafkaSettings: KafkaSettings = null
    override protected[this] implicit val executionContext: ExecutionContext = null
  }

  implicit val id = RequestId("1")

  "extractMetadataFromHeader" that {
    "return None for empty header" in {
      val result = LineageHelpersTest.extractMetadataHeader(None)
      assert(result.isEmpty)
    }

    "return None for wrong header" in {
      val result = LineageHelpersTest.extractMetadataHeader(Some("k,v"))
      assert(result.isEmpty)
      val result2 = LineageHelpersTest.extractMetadataHeader(Some("k=v,k2"))
      assert(result2.isEmpty)
      val result3 = LineageHelpersTest.extractMetadataHeader(Some("kv,=k2,v2"))
      assert(result3.isEmpty)
    }

    "return key and value for metadata header" in {
      val result = LineageHelpersTest.extractMetadataHeader(Some("k=v"))
      assert(result.contains(Map("k" -> "v")))
    }

    "return keys and values for metadata header" in {
      val result = LineageHelpersTest.extractMetadataHeader(Some("k1=v1,k2=v2"))
      assert(result.contains(Map("k1" -> "v1", "k2" -> "v2")))
    }
  }

  "extractClassifications" that {
    "returns bucket classifications" in {
      val request = HttpRequest().withUri("bucket").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1"))
      val result = LineageHelpersTest.extractClassifications(request)
      assert(result.size == 1)
      assert(result contains BucketClassification())
      assert(result(BucketClassification()) == List("classification1"))
    }

    "returns dir classifications" in {
      val request = HttpRequest().withUri("bucket/dir1/").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1,classification2"))
      val result = LineageHelpersTest.extractClassifications(request)
      assert(result.size == 1)
      assert(result contains DirClassification())
      assert(result(DirClassification()) == List("classification1", "classification2"))
    }

    "returns object classifications" in {
      val request = HttpRequest().withUri("bucket/obj").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1,classification2,classification3"))
      val result = LineageHelpersTest.extractClassifications(request)
      assert(result.size == 1)
      assert(result contains ObjectClassification())
      assert(result(ObjectClassification()) == List("classification1", "classification2", "classification3"))
      val request2 = HttpRequest().withUri("bucket/dir1/obj").withHeaders(RawHeader(LineageHelpersTest.CLASSIFICATIONS_HEADER, "classification1"))
      val result2 = LineageHelpersTest.extractClassifications(request2)
      assert(result2.size == 1)
      assert(result2 contains ObjectClassification())
      assert(result2(ObjectClassification()) == List("classification1"))
    }
  }

} 
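
This suite never calls invokePrivate; it reaches protected members by extending the trait with a test object and stubbing the collaborators it will not touch. A hedged sketch of that pattern with a made-up GreetingHelpers trait:

import org.scalatest.wordspec.AnyWordSpec

// Hypothetical trait: the abstract protected implicit can be stubbed with null when the
// code paths under test never use it.
trait GreetingHelpers {
  protected implicit val locale: java.util.Locale
  def normalize(name: String): String = name.trim.toLowerCase
}

class GreetingHelpersSpec extends AnyWordSpec {

  object GreetingHelpersTest extends GreetingHelpers {
    override protected implicit val locale: java.util.Locale = null
  }

  "normalize" should {
    "trim and lowercase its input" in {
      assert(GreetingHelpersTest.normalize("  Alice ") === "alice")
    }
  }
}
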
Example 27
Source File: ExtendableOptimizerSuite.scala    From HANAVora-Extensions   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.extension

import org.apache.spark.sql.catalyst.optimizer.{FiltersReduction, Optimizer}
import org.apache.spark.sql.extension.OptimizerFactory.ExtendableOptimizerBatch
import org.scalatest.{FunSuite, PrivateMethodTester}

class ExtendableOptimizerSuite extends FunSuite with PrivateMethodTester {

  implicit class OptimizerOps(opt: Optimizer) {
    private val nameMethod = PrivateMethod[String]('name)
    private def batches: Seq[AnyRef] = {
      
      val clazz = opt.getClass
      val batchesMethod = clazz.getMethods.find(_.getName == "batches").get
      batchesMethod.setAccessible(true)
      batchesMethod.invoke(opt).asInstanceOf[Seq[AnyRef]]
    }
    def batchNames: Seq[String] =
      batches map { b => b invokePrivate nameMethod() }
  }

  test("No rules is equivalent to DefaultOptimizer") {
    val extOpt = OptimizerFactory.produce()
    val defOpt = OptimizerFactoryForTests.default()
    assert(extOpt.batchNames == defOpt.batchNames)
  }

  test("One early batch is added before the main optimizer batch") {
    val extOpt = OptimizerFactory.produce(
      earlyBatches = ExtendableOptimizerBatch("FOO", 1, FiltersReduction :: Nil) :: Nil
    )

    assert(extOpt.batchNames match {
      case subQueries :: early :: other => early.equals("FOO")
    })
  }

  test("Several early batches are added before the main optimizer batch") {
    val extOpt = OptimizerFactory.produce(
      earlyBatches = ExtendableOptimizerBatch("FOO", 1, FiltersReduction :: Nil) ::
        ExtendableOptimizerBatch("BAR", 1, FiltersReduction :: Nil) ::
        Nil
    )

    assert(extOpt.batchNames match {
      case subQueries :: firstEarly :: secondEarly :: other =>
        firstEarly.equals("FOO") && secondEarly.equals("BAR")
    })
  }

  test("Expression rules are added") {
    val extOpt = OptimizerFactory.produce(
      mainBatchRules = FiltersReduction :: Nil
    )
    val defOpt = OptimizerFactoryForTests.default()
    assert(extOpt.batchNames == defOpt.batchNames)
  }

  test("Both rules are added") {
    val extOpt = OptimizerFactory.produce(
      earlyBatches = ExtendableOptimizerBatch("FOO", 1, FiltersReduction :: Nil) :: Nil,
      mainBatchRules = FiltersReduction :: Nil
    )
    val defOpt = OptimizerFactoryForTests.default()
    assert(extOpt.batchNames.toSet ==
      defOpt.batchNames.toSet ++ Seq("FOO"))
  }
} 
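
Here PrivateMethodTester is combined with plain Java reflection (setAccessible) when a member has to be looked up by name at runtime. A hedged sketch of mixing the two styles on a made-up Planner class:

import org.scalatest.PrivateMethodTester
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical class with two private members, read in two different ways below.
class Planner {
  private def name: String = "planner"
  private def stages: Seq[String] = Seq("parse", "optimize", "execute")
}

class PlannerSuite extends AnyFunSuite with PrivateMethodTester {
  test("both access styles reach private members") {
    val planner = new Planner

    // Style 1: PrivateMethodTester.
    val nameMethod = PrivateMethod[String]('name)
    assert((planner invokePrivate nameMethod()) === "planner")

    // Style 2: raw reflection, as used for `batches` above.
    val stagesMethod = planner.getClass.getDeclaredMethods.find(_.getName == "stages").get
    stagesMethod.setAccessible(true)
    val stages = stagesMethod.invoke(planner).asInstanceOf[Seq[String]]
    assert(stages === Seq("parse", "optimize", "execute"))
  }
}
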
Example 28
Source File: HDFSCredentialProviderSuite.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.yarn.security

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}

class HDFSCredentialProviderSuite
    extends SparkFunSuite
    with PrivateMethodTester
    with Matchers {
  private val _getTokenRenewer = PrivateMethod[String]('getTokenRenewer)

  private def getTokenRenewer(
      hdfsCredentialProvider: HDFSCredentialProvider, conf: Configuration): String = {
    hdfsCredentialProvider invokePrivate _getTokenRenewer(conf)
  }

  private var hdfsCredentialProvider: HDFSCredentialProvider = null

  override def beforeAll() {
    super.beforeAll()

    if (hdfsCredentialProvider == null) {
      hdfsCredentialProvider = new HDFSCredentialProvider()
    }
  }

  override def afterAll() {
    if (hdfsCredentialProvider != null) {
      hdfsCredentialProvider = null
    }

    super.afterAll()
  }

  test("check token renewer") {
    val hadoopConf = new Configuration()
    hadoopConf.set("yarn.resourcemanager.address", "myrm:8033")
    hadoopConf.set("yarn.resourcemanager.principal", "yarn/myrm:[email protected]")
    val renewer = getTokenRenewer(hdfsCredentialProvider, hadoopConf)
    renewer should be ("yarn/myrm:[email protected]")
  }

  test("check token renewer default") {
    val hadoopConf = new Configuration()
    val caught =
      intercept[SparkException] {
        getTokenRenewer(hdfsCredentialProvider, hadoopConf)
      }
    assert(caught.getMessage === "Can't get Master Kerberos principal for use as renewer")
  }
} 
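
Wrapping invokePrivate in a small helper, as getTokenRenewer does above, keeps each test close to ordinary API usage; exceptions thrown by the private method surface to intercept because PrivateMethodTester rethrows the underlying cause. A hedged sketch with a made-up RenewerProvider:

import org.scalatest.PrivateMethodTester
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

// Hypothetical provider: the renewer is simply the configured principal, if any.
class RenewerProvider {
  private def pickRenewer(principal: Option[String]): String =
    principal.getOrElse(throw new IllegalStateException("No principal to use as renewer"))
}

class RenewerProviderSuite extends AnyFunSuite with PrivateMethodTester with Matchers {
  private val _pickRenewer = PrivateMethod[String]('pickRenewer)

  // Thin wrapper so the tests read like calls to a public API.
  private def pickRenewer(provider: RenewerProvider, principal: Option[String]): String =
    provider invokePrivate _pickRenewer(principal)

  test("the configured principal becomes the renewer") {
    pickRenewer(new RenewerProvider, Some("yarn/myrm@EXAMPLE.COM")) should be ("yarn/myrm@EXAMPLE.COM")
  }

  test("a missing principal is reported") {
    val caught = intercept[IllegalStateException] {
      pickRenewer(new RenewerProvider, None)
    }
    caught.getMessage should include ("renewer")
  }
}
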
Example 29
Source File: HDFSCredentialProviderSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.yarn.security

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}

class HDFSCredentialProviderSuite
    extends SparkFunSuite
    with PrivateMethodTester
    with Matchers {
  private val _getTokenRenewer = PrivateMethod[String]('getTokenRenewer)

  private def getTokenRenewer(
      hdfsCredentialProvider: HDFSCredentialProvider, conf: Configuration): String = {
    hdfsCredentialProvider invokePrivate _getTokenRenewer(conf)
  }

  private var hdfsCredentialProvider: HDFSCredentialProvider = null

  override def beforeAll() {
    super.beforeAll()

    if (hdfsCredentialProvider == null) {
      hdfsCredentialProvider = new HDFSCredentialProvider()
    }
  }

  override def afterAll() {
    if (hdfsCredentialProvider != null) {
      hdfsCredentialProvider = null
    }

    super.afterAll()
  }

  test("check token renewer") {
    val hadoopConf = new Configuration()
    hadoopConf.set("yarn.resourcemanager.address", "myrm:8033")
    hadoopConf.set("yarn.resourcemanager.principal", "yarn/myrm:[email protected]")
    val renewer = getTokenRenewer(hdfsCredentialProvider, hadoopConf)
    renewer should be ("yarn/myrm:[email protected]")
  }

  test("check token renewer default") {
    val hadoopConf = new Configuration()
    val caught =
      intercept[SparkException] {
        getTokenRenewer(hdfsCredentialProvider, hadoopConf)
      }
    assert(caught.getMessage === "Can't get Master Kerberos principal for use as renewer")
  }
} 
Example 30
Source File: RecurringTimerSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.streaming.util

import java.util.concurrent.ConcurrentLinkedQueue

import scala.collection.JavaConverters._
import scala.concurrent.duration._

import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.ManualClock

class RecurringTimerSuite extends SparkFunSuite with PrivateMethodTester {

  test("basic") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]()
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-basic")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    clock.advance(100)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L))
    }
    clock.advance(200)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L, 100L, 200L, 300L))
    }
    assert(timer.stop(interruptTimer = true) === 300L)
  }

  test("SPARK-10224: call 'callback' after stopping") {
    val clock = new ManualClock()
    val results = new ConcurrentLinkedQueue[Long]
    val timer = new RecurringTimer(clock, 100, time => {
      results.add(time)
    }, "RecurringTimerSuite-SPARK-10224")
    timer.start(0)
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(results.asScala.toSeq === Seq(0L))
    }
    @volatile var lastTime = -1L
    // Now RecurringTimer is waiting for the next interval
    val thread = new Thread {
      override def run(): Unit = {
        lastTime = timer.stop(interruptTimer = false)
      }
    }
    thread.start()
    val stopped = PrivateMethod[RecurringTimer]('stopped)
    // Make sure the `stopped` field has been changed
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(timer.invokePrivate(stopped()) === true)
    }
    clock.advance(200)
    // When RecurringTimer is awake from clock.waitTillTime, it will call `callback` once.
    // Then it will find `stopped` is true and exit the loop, but it should call `callback` again
    // before exiting its internal thread.
    thread.join()
    assert(results.asScala.toSeq === Seq(0L, 100L, 200L))
    assert(lastTime === 200L)
  }
} 
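
Polling private state with eventually, as done for stopped above, needs no special support beyond calling invokePrivate inside the polled block. A hedged sketch against a made-up BackgroundWorker, assuming the same Eventually and scala.concurrent.duration imports used in the suite above:

import java.util.concurrent.atomic.AtomicBoolean

import scala.concurrent.duration._

import org.scalatest.PrivateMethodTester
import org.scalatest.concurrent.Eventually._
import org.scalatest.funsuite.AnyFunSuite

// Hypothetical worker: a background thread flips a private flag that the test polls.
class BackgroundWorker {
  private val finished = new AtomicBoolean(false)
  def start(): Unit = {
    val t = new Thread(new Runnable { override def run(): Unit = finished.set(true) })
    t.start()
  }
  private def isFinished: Boolean = finished.get()
}

class BackgroundWorkerSuite extends AnyFunSuite with PrivateMethodTester {
  test("private flags can be polled until they flip") {
    val isFinished = PrivateMethod[Boolean]('isFinished)
    val worker = new BackgroundWorker
    worker.start()
    // Re-invoke the private accessor on every poll until it reports completion.
    eventually(timeout(10.seconds), interval(10.millis)) {
      assert(worker.invokePrivate(isFinished()) === true)
    }
  }
}
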
Example 31
Source File: CommandUtilsSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.worker

import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils

class CommandUtilsSuite extends SparkFunSuite with Matchers with PrivateMethodTester {

  test("set libraryPath correctly") {
    val appId = "12345-worker321-9876"
    val sparkHome = sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!"))
    val cmd = new Command("mainClass", Seq(), Map(), Seq(), Seq("libraryPathToB"), Seq())
    val builder = CommandUtils.buildProcessBuilder(
      cmd, new SecurityManager(new SparkConf), 512, sparkHome, t => t)
    val libraryPath = Utils.libraryPathEnvName
    val env = builder.environment
    env.keySet should contain(libraryPath)
    assert(env.get(libraryPath).startsWith("libraryPathToB"))
  }

  test("auth secret shouldn't appear in java opts") {
    val buildLocalCommand = PrivateMethod[Command]('buildLocalCommand)
    val conf = new SparkConf
    val secret = "This is the secret sauce"
    // set auth secret
    conf.set(SecurityManager.SPARK_AUTH_SECRET_CONF, secret)
    val command = new Command("mainClass", Seq(), Map(), Seq(), Seq("lib"),
      Seq("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF + "=" + secret))

    // auth is not set
    var cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to false
    conf.set(SecurityManager.SPARK_AUTH_CONF, "false")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(!cmd.environment.contains(SecurityManager.ENV_AUTH_SECRET))

    // auth is set to true
    conf.set(SecurityManager.SPARK_AUTH_CONF, "true")
    cmd = CommandUtils invokePrivate buildLocalCommand(
      command, new SecurityManager(conf), (t: String) => t, Seq(), Map())
    assert(!cmd.javaOpts.exists(_.startsWith("-D" + SecurityManager.SPARK_AUTH_SECRET_CONF)))
    assert(cmd.environment(SecurityManager.ENV_AUTH_SECRET) === secret)
  }
} 
Example 32
Source File: LogPageSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.worker.ui

import java.io.{File, FileWriter}

import org.mockito.Mockito.{mock, when}
import org.scalatest.PrivateMethodTester

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.worker.Worker

class LogPageSuite extends SparkFunSuite with PrivateMethodTester {

  test("get logs simple") {
    val webui = mock(classOf[WorkerWebUI])
    val worker = mock(classOf[Worker])
    val tmpDir = new File(sys.props("java.io.tmpdir"))
    val workDir = new File(tmpDir, "work-dir")
    workDir.mkdir()
    when(webui.workDir).thenReturn(workDir)
    when(webui.worker).thenReturn(worker)
    when(worker.conf).thenReturn(new SparkConf())
    val logPage = new LogPage(webui)

    // Prepare some fake log files to read later
    val out = "some stdout here"
    val err = "some stderr here"
    val tmpOut = new File(workDir, "stdout")
    val tmpErr = new File(workDir, "stderr")
    val tmpErrBad = new File(tmpDir, "stderr") // outside the working directory
    val tmpOutBad = new File(tmpDir, "stdout")
    val tmpRand = new File(workDir, "random")
    write(tmpOut, out)
    write(tmpErr, err)
    write(tmpOutBad, out)
    write(tmpErrBad, err)
    write(tmpRand, "1 6 4 5 2 7 8")

    // Get the logs. All log types other than "stderr" or "stdout" will be rejected
    val getLog = PrivateMethod[(String, Long, Long, Long)]('getLog)
    val (stdout, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stdout", None, 100)
    val (stderr, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "stderr", None, 100)
    val (error1, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "random", None, 100)
    val (error2, _, _, _) =
      logPage invokePrivate getLog(workDir.getAbsolutePath, "does-not-exist.txt", None, 100)
    // These files exist, but live outside the working directory
    val (error3, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stderr", None, 100)
    val (error4, _, _, _) =
      logPage invokePrivate getLog(tmpDir.getAbsolutePath, "stdout", None, 100)
    assert(stdout === out)
    assert(stderr === err)
    assert(error1.startsWith("Error: Log type must be one of "))
    assert(error2.startsWith("Error: Log type must be one of "))
    assert(error3.startsWith("Error: invalid log directory"))
    assert(error4.startsWith("Error: invalid log directory"))
  }

  
  private def write(f: File, s: String): Unit = {
    val writer = new FileWriter(f)
    try {
      writer.write(s)
    } finally {
      writer.close()
    }
  }

} 
Example 33
Source File: SparkRecoverPartitionsCustomTest.scala    From m3d-engine   with Apache License 2.0 5 votes vote down vote up
package com.adidas.analytics.unit

import com.adidas.analytics.util.SparkRecoverPartitionsCustom
import com.adidas.utils.SparkSessionWrapper
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers, PrivateMethodTester}

import scala.collection.JavaConverters._

class SparkRecoverPartitionsCustomTest extends FunSuite
  with SparkSessionWrapper
  with PrivateMethodTester
  with Matchers
  with BeforeAndAfterAll {

  test("test conversion of String Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue("theValue")

    result should be("'theValue'")
  }

  test("test conversion of Short Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Short.valueOf("2"))

    result should be("2")
  }

  test("test conversion of Integer Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Integer.valueOf("4"))

    result should be("4")
  }

  test("test conversion of null Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(null)
    }
  }

  test("test conversion of not supported Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(false)
    }
  }

  test("test HiveQL statements Generation") {
    val customSparkRecoverPartitions = SparkRecoverPartitionsCustom(
      tableName="test",
      targetPartitions = Seq("country","district")
    )

    val rowsInput = Seq(
      Row(1, "portugal", "porto"),
      Row(2, "germany", "herzogenaurach"),
      Row(3, "portugal", "coimbra")
    )

    val inputSchema = StructType(
      List(
        StructField("number", IntegerType, nullable = true),
        StructField("country", StringType, nullable = true),
        StructField("district", StringType, nullable = true)
      )
    )

    val expectedStatements: Seq[String] = Seq(
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='porto')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='germany',district='herzogenaurach')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='coimbra')"
    )

    val testDataset: Dataset[Row] = spark.createDataset(rowsInput)(RowEncoder(inputSchema))

    val createParameterValue = PrivateMethod[Dataset[String]]('generateAddPartitionStatements)

    val producedStatements: Seq[String] = (customSparkRecoverPartitions invokePrivate createParameterValue(testDataset))
      .collectAsList()
      .asScala

    expectedStatements.sorted.toSet should equal(producedStatements.sorted.toSet)
  }

  override def afterAll(): Unit = {
    spark.stop()
  }

} 
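
A condensed, hypothetical variant of the createParameterValue tests above: one private method dispatching on the runtime type of an Any argument, with one test per branch, including the rejection branch:

import org.scalatest.PrivateMethodTester
import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

// Hypothetical formatter: strings are quoted, numbers pass through, anything else fails.
class PartitionFormatter {
  private def formatValue(value: Any): String = value match {
    case s: String           => s"'$s'"
    case n: java.lang.Number => n.toString
    case other               => throw new IllegalArgumentException(s"unsupported value: $other")
  }
}

class PartitionFormatterSuite extends AnyFunSuite with PrivateMethodTester with Matchers {
  private val formatter = new PartitionFormatter
  private val formatValue = PrivateMethod[String]('formatValue)

  test("strings are quoted") {
    (formatter invokePrivate formatValue("porto")) should be("'porto'")
  }

  test("numbers pass through unquoted") {
    (formatter invokePrivate formatValue(java.lang.Integer.valueOf("4"))) should be("4")
  }

  test("unsupported types are rejected") {
    an[Exception] should be thrownBy {
      formatter invokePrivate formatValue(false)
    }
  }
}
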
Example 34
Source File: RecoverPartitionsCustomTest.scala    From m3d-engine   with Apache License 2.0 5 votes vote down vote up
package com.adidas.analytics.unit

import com.adidas.analytics.util.RecoverPartitionsCustom
import com.adidas.utils.SparkSessionWrapper
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest.{BeforeAndAfterAll, FunSuite, Matchers, PrivateMethodTester}

import scala.collection.JavaConverters._

class RecoverPartitionsCustomTest extends FunSuite
  with SparkSessionWrapper
  with PrivateMethodTester
  with Matchers
  with BeforeAndAfterAll {

  test("test conversion of String Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue("theValue")

    result should be("'theValue'")
  }

  test("test conversion of Short Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Short.valueOf("2"))

    result should be("2")
  }

  test("test conversion of Integer Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    val result = customSparkRecoverPartitions invokePrivate createParameterValue(java.lang.Integer.valueOf("4"))

    result should be("4")
  }

  test("test conversion of null Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(null)
    }
  }

  test("test conversion of not supported Value to HiveQL Partition Parameter") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(tableName="", targetPartitions = Seq())
    val createParameterValue = PrivateMethod[String]('createParameterValue)
    an [Exception] should be thrownBy {
      customSparkRecoverPartitions invokePrivate createParameterValue(false)
    }
  }

  test("test HiveQL statements Generation") {
    val customSparkRecoverPartitions = RecoverPartitionsCustom(
      tableName="test",
      targetPartitions = Seq("country","district")
    )

    val rowsInput = Seq(
      Row(1, "portugal", "porto"),
      Row(2, "germany", "herzogenaurach"),
      Row(3, "portugal", "coimbra")
    )

    val inputSchema = StructType(
      List(
        StructField("number", IntegerType, nullable = true),
        StructField("country", StringType, nullable = true),
        StructField("district", StringType, nullable = true)
      )
    )

    val expectedStatements: Seq[String] = Seq(
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='porto')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='germany',district='herzogenaurach')",
      "ALTER TABLE test ADD IF NOT EXISTS PARTITION(country='portugal',district='coimbra')"
    )

    val testDataset: Dataset[Row] = spark.createDataset(rowsInput)(RowEncoder(inputSchema))

    val createParameterValue = PrivateMethod[Dataset[String]]('generateAddPartitionStatements)

    val producedStatements: Seq[String] = (customSparkRecoverPartitions invokePrivate createParameterValue(testDataset))
      .collectAsList()
      .asScala

    expectedStatements.sorted.toSet should equal(producedStatements.sorted.toSet)
  }

  override def afterAll(): Unit = {
    spark.stop()
  }

} 
Example 35
Source File: RemoraDatadogReporterSpec.scala    From remora   with MIT License 5 votes vote down vote up
package reporter


import com.codahale.metrics.{Metric, MetricFilter, MetricRegistry}
import config.DataDog
import org.coursera.metrics.datadog.MetricNameFormatter
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FlatSpec, Matchers, PrivateMethodTester}

class RemoraDatadogReporterSpec extends FlatSpec with Matchers with PrivateMethodTester with MockFactory {

  private val metricRegistry: MetricRegistry = new MetricRegistry
  private val metric: Metric = mock[Metric]
  private val config = DataDog(enabled = true, "test", 1, "localhost", 8125, List.empty, removeTagsFromMetricName = false)
  private val configRemoveTags = DataDog(enabled = true, "test", 1, "localhost", 8125, List.empty, removeTagsFromMetricName = true)

  "Metrics filter" should "match any metric when no filter is given" in {
    val filter = buildMetricFilter(List.empty)

    filter.matches("any_metrics_name", metric) should be(true)
  }

  it should "match metric containing consumer group name" in {
    val kafkaConsumerGroupName = "test-consumer1"
    val filter = buildMetricFilter(List(kafkaConsumerGroupName))

    filter.matches(s"metric-name-$kafkaConsumerGroupName", metric) should be(true)
  }

  it should "not match metric containing consumer group name" in {
    val filter = buildMetricFilter(List("test-consumer1"))

    filter.matches("some-metrics", metric) should be(false)
  }

  "Metric name formatter" should "add tag information if metric is well formatted" in {
    val formatter = getMetricNameFormatter(config)

    formatter.format(s"${config.name}.gauge.test.1.test-consumer.lag") should be(s"${config.name}.gauge.test.1.test-consumer.lag[topic:test,group:test-consumer,partition:1]")
  }

  it should "not add partition tag information if no partition" in {
    val formatter = getMetricNameFormatter(config)

    formatter.format(s"${config.name}.gauge.test-topic.test-consumer.totalLag") should be(s"${config.name}.gauge.test-topic.test-consumer.totalLag[topic:test-topic,group:test-consumer]")
  }

  it should "not add tag information otherwise" in {
    val formatter = getMetricNameFormatter(config)

    formatter.format(s"${config.name}.gauge.test_1_faulty_test-consumer__lag") should be(s"${config.name}.gauge.test_1_faulty_test-consumer__lag")
  }

  "Metric name formatter without tags" should "add tag information if metric is well formatted" in {
    val formatter = getMetricNameFormatter(configRemoveTags)

    formatter.format(s"${configRemoveTags.name}.gauge.test.1.test-consumer.lag") should be(s"${configRemoveTags.name}.gauge.lag[topic:test,group:test-consumer,partition:1]")
  }

  it should "not add partition tag information if no partition" in {
    val formatter = getMetricNameFormatter(configRemoveTags)

    formatter.format(s"${configRemoveTags.name}.gauge.test-topic.test-consumer.totalLag") should be(s"${configRemoveTags.name}.gauge.totalLag[topic:test-topic,group:test-consumer]")
  }

  private def buildMetricFilter(kafkaConsumerList: List[String], removeTags: Boolean = false): MetricFilter = {
    val config = DataDog(enabled = true, "test", 1, "localhost", 8125, kafkaConsumerList, removeTags)
    val reporter = new RemoraDatadogReporter(metricRegistry, config)
    reporter invokePrivate PrivateMethod[MetricFilter]('kafkaConsumerGroupFilter)()
  }

  private def getMetricNameFormatter(config: DataDog): MetricNameFormatter = {
    val reporter = new RemoraDatadogReporter(metricRegistry, config)
    reporter invokePrivate PrivateMethod[MetricNameFormatter]('metricNameFormatter)(config.removeTagsFromMetricName)
  }
} 
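
Here the PrivateMethod values are built inline at the call site rather than stored in fields, for both a zero-argument and a one-argument private method. A hedged sketch of the same style on a made-up NameReporter:

import org.scalatest.PrivateMethodTester
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

// Hypothetical reporter: both private methods derive names from the constructor prefix.
class NameReporter(prefix: String) {
  private def defaultName: String = s"$prefix.default"
  private def decorate(name: String): String = s"$prefix.$name"
}

class NameReporterSpec extends AnyFlatSpec with Matchers with PrivateMethodTester {

  "A NameReporter" should "expose its default name" in {
    val reporter = new NameReporter("remora")
    // Zero-argument invocation, with the PrivateMethod built inline.
    (reporter invokePrivate PrivateMethod[String]('defaultName)()) should be("remora.default")
  }

  it should "decorate metric names with its prefix" in {
    val reporter = new NameReporter("remora")
    (reporter invokePrivate PrivateMethod[String]('decorate)("lag")) should be("remora.lag")
  }
}
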
Example 36
Source File: HDFSCredentialProviderSuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.yarn.security

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.scalatest.{Matchers, PrivateMethodTester}

import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}

class HDFSCredentialProviderSuite
    extends SparkFunSuite
    with PrivateMethodTester
    with Matchers {
  private val _getTokenRenewer = PrivateMethod[String]('getTokenRenewer)

  private def getTokenRenewer(
      hdfsCredentialProvider: HDFSCredentialProvider, conf: Configuration): String = {
    hdfsCredentialProvider invokePrivate _getTokenRenewer(conf)
  }

  private var hdfsCredentialProvider: HDFSCredentialProvider = null

  override def beforeAll() {
    super.beforeAll()

    if (hdfsCredentialProvider == null) {
      hdfsCredentialProvider = new HDFSCredentialProvider()
    }
  }

  override def afterAll() {
    if (hdfsCredentialProvider != null) {
      hdfsCredentialProvider = null
    }

    super.afterAll()
  }

  test("check token renewer") {
    val hadoopConf = new Configuration()
    hadoopConf.set("yarn.resourcemanager.address", "myrm:8033")
    hadoopConf.set("yarn.resourcemanager.principal", "yarn/myrm:[email protected]")
    val renewer = getTokenRenewer(hdfsCredentialProvider, hadoopConf)
    renewer should be ("yarn/myrm:[email protected]")
  }

  test("check token renewer default") {
    val hadoopConf = new Configuration()
    val caught =
      intercept[SparkException] {
        getTokenRenewer(hdfsCredentialProvider, hadoopConf)
      }
    assert(caught.getMessage === "Can't get Master Kerberos principal for use as renewer")
  }
}