org.scalatest.prop.Checkers Scala Examples

The following examples show how to use org.scalatest.prop.Checkers. They are drawn from open-source projects; the originating project, source file, and license are noted above each example.
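Before the project examples, here is a minimal, self-contained sketch of the pattern they all share (the ReverseSpec suite and its property are illustrative, not taken from any project below): mixing Checkers into a ScalaTest suite provides a check method that runs a ScalaCheck property and fails the test when a counterexample is found.

import org.scalacheck.Prop
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class ReverseSpec extends FunSuite with Checkers {
  test("reversing a list twice yields the original list") {
    // check(...) evaluates the ScalaCheck property and fails the test on a counterexample
    check(Prop.forAll { (xs: List[Int]) =>
      xs.reverse.reverse == xs
    })
  }
}

Most of the examples below follow this shape, adding an implicit Arbitrary instance when the type under test has no default generator.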
Example 1
Source File: ArgonautSpec.scala    From xenomorph   with GNU Lesser General Public License v3.0
package xenomorph.argonaut

import org.scalacheck._
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers
import xenomorph.samples._

class ArgonautSpec extends FunSuite with Checkers {

  import xenomorph.scalacheck.ToGen._
  import xenomorph.scalacheck.Implicits._
  import xenomorph.argonaut.Implicits._
  import xenomorph.argonaut.ToJson._
  import xenomorph.argonaut.FromJson._

  test("A value should serialise to JSON") {
    val result = Person.schema.toJson(person)
    assert(result.toString == """{"roles":[{"administrator":{"subordinateCount":0,"department":"windmill-tilting"}}],"birthDate":20147028000,"name":"Kris Nuttycombe"}""")
  }

  test("A value should be deserialised from JSON"){
    val result = Person.schema.toJson(person)
    assert(Person.schema.fromJson(result).toOption == Some(person))
  }


  test("Serialization should round-trip values produced by a generator"){
    implicit val arbPerson: Arbitrary[Person] = Arbitrary(Person.schema.toGen)
    check{
      (p: Person) =>
        Person.schema.fromJson(Person.schema.toJson(p)).toOption == Some(p)
    }
  }

} 
Example 2
Source File: ScodecSpec.scala    From xenomorph   with GNU Lesser General Public License v3.0
package xenomorph.scodec

import _root_.scodec.Attempt
import _root_.scodec.bits.BitVector
import org.scalacheck.Arbitrary
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers
import xenomorph.Schema.Schema
import xenomorph.json.JType.JSchema
import xenomorph.samples.Person

class ScodecSpec extends FunSuite with Checkers {

  import xenomorph.scalacheck.ToGen._
  import xenomorph.scalacheck.Implicits._
  import xenomorph.scodec.ToEncoder._
  import xenomorph.scodec.ToDecoder._
  import xenomorph.scodec.Implicits._

  test("Serialization of values to binary should round-trip values produced by a generator"){

    val schema: Schema[JSchema, Person] = Person.schema
    implicit val arbPerson: Arbitrary[Person] = Arbitrary(schema.toGen)

    check(
      (p: Person) => {
        val res = for {
          enc <- schema.toEncoder.encode(p)
          dec <- schema.toDecoder.decode(enc)
        } yield dec

        (res.map(_.value) == Attempt.successful(p)) &&
          (res.map(_.remainder) == Attempt.successful(BitVector.empty))
      }
    )
  }

} 
Example 3
Source File: TreeSpec.scala    From bonsai   with MIT License
package com.stripe.bonsai

import org.scalatest.{ WordSpec, Matchers }
import org.scalatest.prop.{ Checkers, PropertyChecks }

class TreeSpec extends WordSpec with Matchers with Checkers with PropertyChecks {
  val ops = TreeOps[Tree[Int], Int]

  def sumTree(g: Tree[Int]): Long =
    ops.reduce[Long](g)(_ + _.sum).getOrElse(0L)

  def treeElems(g: Tree[Int]): Set[Int] =
    ops.reduce[Set[Int]](g) { (n, as) =>
      as.foldLeft(Set(n))(_ | _)
    }.getOrElse(Set.empty)

  def treeMin(g: Tree[Int]): Option[Int] =
    ops.reduce[Int](g) { (n, as) =>
      if (as.isEmpty) n else n min as.min
    }

  "Tree.apply" should {
    "copy structure of tree" in {
      forAll { (tree: GenericTree[Int]) =>
        GenericTree.fromTree(Tree(tree)) shouldBe Some(tree)
      }
    }

    "sum with tree ops" in {
      forAll { (x: Int, gtrees: List[GenericTree[Int]]) =>
        val trees = gtrees.map(Tree(_))
        val tree = Tree(GenericTree(x, gtrees))
        val expected = trees.foldLeft(x.toLong) { (t, n) => t + sumTree(n) }
        sumTree(tree) shouldBe expected
      }
    }

    "elems with tree ops" in {
      forAll { (x: Int, gtrees: List[GenericTree[Int]]) =>
        val trees = gtrees.map(Tree(_))
        val tree = Tree(GenericTree(x, gtrees))

        val elems = trees.foldLeft(Set(x))((t, n) => t | treeElems(n))
        treeElems(tree) shouldBe elems
        treeMin(tree) shouldBe Some(elems.min)
      }
    }
  }
} 
Example 4
Source File: ArraySetHashCheck.scala    From abc   with Apache License 2.0
package com.rklaehn.abc

import algebra.{Eq, Order}
import cats.implicits._
import org.scalacheck.Arbitrary
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers
import scala.reflect.ClassTag

class ArraySetHashCheck extends FunSuite with Checkers with Helpers {

  def checkHashing[T: Order: Hash: ClassTag: Arbitrary](): Unit = {
    val name = typeName[T]
    test(s"hashConsistency $name") {
      check { xs: Vector[T] ⇒
        Eq.eqv(ArraySet(xs: _*), ArraySet(xs.reverse: _*)) &&
        Hash.hash(ArraySet(xs: _*)) == Hash.hash(ArraySet(xs.reverse: _*))
      }
    }
  }
  checkHashing[Byte]()
  checkHashing[Short]()
  checkHashing[Int]()
  checkHashing[Long]()
  checkHashing[Float]()
  checkHashing[Double]()
  checkHashing[Boolean]()
  checkHashing[Char]()
  checkHashing[String]()
} 
Example 5
Source File: QuickCheckSuite.scala    From Principles-of-Reactive-Programming   with GNU General Public License v3.0
package quickcheck

import org.scalatest.FunSuite

import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner

import org.scalatest.prop.Checkers
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop
import org.scalacheck.Prop._

import org.scalatest.exceptions.TestFailedException

object QuickCheckBinomialHeap extends QuickCheckHeap with BinomialHeap

@RunWith(classOf[JUnitRunner])
class QuickCheckSuite extends FunSuite with Checkers {
  def checkBogus(p: Prop): Unit = {
    var ok = false
    try {
      check(p)
    } catch {
      case e: TestFailedException =>
        ok = true
    }
    assert(ok, "A bogus heap should NOT satisfy all properties. Try to find the bug!")
  }

  test("Binomial heap satisfies properties.") {
    check(new QuickCheckHeap with BinomialHeap)
  }

  test("Bogus (1) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus1BinomialHeap)
  }

  test("Bogus (2) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus2BinomialHeap)
  }

  test("Bogus (3) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus3BinomialHeap)
  }

  test("Bogus (4) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus4BinomialHeap)
  }

  test("Bogus (5) binomial heap does not satisfy properties.") {
    checkBogus(new QuickCheckHeap with Bogus5BinomialHeap)
  }
} 
Example 6
Source File: OAuth2Spec.scala    From finch-oauth2   with Apache License 2.0
package io.finch.oauth2

import cats.effect.IO
import com.twitter.finagle.http.Status
import com.twitter.finagle.oauth2._
import com.twitter.util.Future
import io.finch._
import io.finch.catsEffect._
import org.mockito.Mockito._
import org.scalatest.{FlatSpec, Matchers}
import org.scalatest.mockito.MockitoSugar
import org.scalatest.prop.Checkers

class OAuth2Spec extends FlatSpec with Matchers with Checkers with MockitoSugar {

  behavior of "OAuth2"

  it should "authorize the requests" in {
    val at: AccessToken = AccessToken("foo", None, None, None, null)
    val dh: DataHandler[Int] = mock[DataHandler[Int]]
    val ai: AuthInfo[Int] = new AuthInfo(42, "foo", None, None)

    when(dh.findAccessToken("bar")).thenReturn(Future.value(Some(at)))
    when(dh.isAccessTokenExpired(at)).thenReturn(false)
    when(dh.findAuthInfoByAccessToken(at)).thenReturn(Future.value(Some(ai)))

    val authInfo: Endpoint[IO, AuthInfo[Int]] = authorize(dh)
    val e: Endpoint[IO, Int] = get("user" :: authInfo) { ai: AuthInfo[Int] =>
      Ok(ai.user)
    }

    val i1 = Input.get("/user", "access_token" -> "bar")
    val i2 = Input.get("/user")

    e(i1).awaitOutputUnsafe() shouldBe Some(Ok(42))
    val Some(error) = e(i2).awaitOutputUnsafe()
    error.status shouldBe Status.BadRequest
    error.headers should contain key "WWW-Authenticate"
  }

  it should "issue the access token" in {
    val dh: DataHandler[Int] = mock[DataHandler[Int]]
    val at: AccessToken = AccessToken("foobar", None, None, None, null)

    when(dh.validateClient("id", "", "password")).thenReturn(Future.value(true))
    when(dh.findUser("u", "p")).thenReturn(Future.value(Some(42)))
    when(dh.getStoredAccessToken(AuthInfo(42, "id", None, None))).thenReturn(Future.value(Some(at)))
    when(dh.isAccessTokenExpired(at)).thenReturn(false)

    val grantHandlerResult: Endpoint[IO, GrantResult] = issueAccessToken(dh)
    val e: Endpoint[IO, String] = get("token" :: grantHandlerResult) { ghr: GrantResult =>
      Ok(ghr.accessToken)
    }

    val i1 = Input.get("/token",
      "grant_type" -> "password", "username" -> "u", "password" -> "p", "client_id" -> "id"
    )

    val i2 = Input.get("/token")

    e(i1).awaitOutputUnsafe() shouldBe Some(Ok("foobar"))
    val Some(error) = e(i2).awaitOutputUnsafe()
    error.status shouldBe Status.BadRequest
    error.headers should contain key "WWW-Authenticate"
  }
} 
Example 7
Source File: SerialIntegrationTest.scala    From finagle-serial   with Apache License 2.0
package io.github.finagle.serial.tests

import com.twitter.finagle.{Client, ListeningServer, Server, Service}
import com.twitter.util.{Await, Future, Try}
import io.github.finagle.serial.Serial
import java.net.{InetAddress, InetSocketAddress}
import org.scalatest.Matchers
import org.scalatest.prop.Checkers
import org.scalacheck.{Arbitrary, Gen, Prop}


  // Excerpt: the enclosing trait, which defines the codec type C[_] along with
  // createServerAndClient and serviceFunctionProp, is elided in this listing.
  def testFunctionService[I, O](
    f: I => O
  )(implicit
    inCodec: C[I],
    outCodec: C[O],
    arb: Arbitrary[I]
  ): Unit = {
    val (fServer, fClient) = createServerAndClient(f)(inCodec, outCodec)

    check(serviceFunctionProp(fClient)(f)(arb.arbitrary))

    Await.result(fServer.close())
  }
} 
Example 8
Source File: DeltaByteArrayEncoderSuite.scala    From OAP   with Apache License 2.0
package org.apache.spark.sql.execution.datasources.oap.io

import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Prop.forAll
import org.scalatest.prop.Checkers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.datasources.oap.adapter.PropertiesAdapter
import org.apache.spark.sql.execution.datasources.oap.filecache.StringFiberBuilder
import org.apache.spark.sql.types.StringType
import org.apache.spark.unsafe.types.UTF8String

class DeltaByteArrayEncoderCheck extends Properties("DeltaByteArrayEncoder") {

  private val rowCountInEachGroup = Gen.choose(1, 1024)
  private val rowCountInLastGroup = Gen.choose(1, 1024)
  private val groupCount = Gen.choose(1, 100)

  property("Encoding/Decoding String Type") = forAll { (values: Array[String]) =>

    forAll(rowCountInEachGroup, rowCountInLastGroup, groupCount) {
      (rowCount, lastCount, groupCount) =>
        if (values.nonEmpty) {
          // This uses the 'PLAIN' FiberBuilder as a reference to validate the encoding/decoding.
          // Ideally, the test would be:
          //   values => encoded bytes => decoded bytes => decoded values (using the ColumnValues class)
          // and then check that 'values' and 'decoded values' are identical.
          // But ColumnValues only supports reading values from a DataFile, so we
          // validate against the reference builder instead.
          val referenceFiberBuilder = StringFiberBuilder(rowCount, 0)
          val fiberBuilder = DeltaByteArrayFiberBuilder(rowCount, 0, StringType)
          val fiberParser = DeltaByteArrayDataFiberParser(
            new OapDataFileMetaV1(rowCountInEachGroup = rowCount), StringType)
          !(0 until groupCount).exists { group =>
            // If lastCount > rowCount, assume lastCount = rowCount
            val count = if (group < groupCount - 1) {
              rowCount
            } else if (lastCount > rowCount) {
              rowCount
            } else {
              lastCount
            }
            (0 until count).foreach { row =>
              fiberBuilder.append(InternalRow(UTF8String.fromString(values(row % values.length))))
              referenceFiberBuilder
                .append(InternalRow(UTF8String.fromString(values(row % values.length))))
            }
            val bytes = fiberBuilder.build().fiberData
            val parsedBytes = fiberParser.parse(bytes, count)
            val referenceBytes = referenceFiberBuilder.build().fiberData
            referenceFiberBuilder.clear()
            fiberBuilder.clear()
            assert(parsedBytes.length == referenceBytes.length)
            parsedBytes.zip(referenceBytes).exists(byte => byte._1 != byte._2)
          }
        } else true
    }
  }
}

class DeltaByteArrayEncoderSuite extends SparkFunSuite with Checkers {

  test("Check Encoding/Decoding") {
    check(PropertiesAdapter.getProp(new DeltaByteArrayEncoderCheck()))
  }
} 
Example 9
Source File: CodecFactorySuite.scala    From OAP   with Apache License 2.0
package org.apache.spark.sql.execution.datasources.oap.io

import org.apache.hadoop.conf.Configuration
import org.apache.parquet.format.CompressionCodec
import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Prop.forAllNoShrink
import org.scalatest.prop.Checkers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.execution.datasources.oap.adapter.PropertiesAdapter

class CodecFactoryCheck extends Properties("CodecFactory") {

  private val codecFactory = new CodecFactory(new Configuration())

  private val gen = Gen.sized { size =>
    for {
      codec <- Arbitrary.arbitrary[CompressionCodec]
      times <- Gen.posNum[Int]
      bytes <- Gen.containerOfN[Array, Byte](size * 100, Arbitrary.arbitrary[Byte])
    } yield (codec, times, bytes)
  }

  property("compress/decompress") = forAllNoShrink(gen) {
    // Array[Array[Byte]] means one group of fibers' data
    case (codec, times, bytes) =>
      val compressor = codecFactory.getCompressor(codec)
      val decompressor = codecFactory.getDecompressor(codec)

      (0 until times).forall(_ => decompressor.decompress(compressor.compress(bytes), bytes.length)
        .sameElements(bytes))
  }

  implicit lazy val arbCompressionCodec: Arbitrary[CompressionCodec] = {
    Arbitrary(genCompressionCodec)
  }
  private lazy val genCompressionCodec: Gen[CompressionCodec] = Gen.oneOf(
    CompressionCodec.UNCOMPRESSED, CompressionCodec.GZIP,
    CompressionCodec.SNAPPY, CompressionCodec.LZO)
}

class CodecFactorySuite extends SparkFunSuite with Checkers {

  test("Check CodecFactory Compress/Decompress") {
    check(PropertiesAdapter.getProp(new CodecFactoryCheck()))
  }
} 
Example 10
Source File: DictionaryBasedEncoderSuite.scala    From OAP   with Apache License 2.0
package org.apache.spark.sql.execution.datasources.oap.io

import org.apache.parquet.bytes.BytesInput
import org.apache.parquet.column.page.DictionaryPage
import org.apache.parquet.column.values.dictionary.PlainValuesDictionary.PlainBinaryDictionary
import org.scalacheck.{Arbitrary, Gen, Properties}
import org.scalacheck.Prop.forAll
import org.scalatest.prop.Checkers

import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.execution.datasources.oap.adapter.PropertiesAdapter
import org.apache.spark.sql.execution.datasources.oap.filecache.StringFiberBuilder
import org.apache.spark.sql.types.StringType
import org.apache.spark.unsafe.types.UTF8String

class DictionaryBasedEncoderCheck extends Properties("DictionaryBasedEncoder") {
  private val rowCountInEachGroup = Gen.choose(1, 1024)
  private val rowCountInLastGroup = Gen.choose(1, 1024)
  private val groupCount = Gen.choose(1, 100)

  property("Encoding/Decoding String Type") = forAll { (values: Array[String]) =>

    forAll(rowCountInEachGroup, rowCountInLastGroup, groupCount) {
      (rowCount, lastCount, groupCount) =>
        if (values.nonEmpty) {
          // This uses the 'PLAIN' FiberBuilder as a reference to validate the encoding/decoding.
          // Ideally, the test would be:
          //   values => encoded bytes => decoded bytes => decoded values (using the ColumnValues class)
          // and then check that 'values' and 'decoded values' are identical.
          // But ColumnValues only supports reading values from a DataFile, so we
          // validate against the reference builder instead.
          val referenceFiberBuilder = StringFiberBuilder(rowCount, 0)
          val fiberBuilder = PlainBinaryDictionaryFiberBuilder(rowCount, 0, StringType)
          !(0 until groupCount).exists { group =>
            // If lastCount > rowCount, assume lastCount = rowCount
            val count =
              if (group < groupCount - 1) {
                rowCount
              } else if (lastCount > rowCount) {
                rowCount
              } else {
                lastCount
              }
            (0 until count).foreach { row =>
              fiberBuilder.append(InternalRow(UTF8String.fromString(values(row % values.length))))
              referenceFiberBuilder
                .append(InternalRow(UTF8String.fromString(values(row % values.length))))
            }
            val bytes = fiberBuilder.build().fiberData
            val dictionary = new PlainBinaryDictionary(
              new DictionaryPage(
                BytesInput.from(fiberBuilder.buildDictionary),
                fiberBuilder.getDictionarySize,
                org.apache.parquet.column.Encoding.PLAIN))
            val fiberParser = PlainDictionaryFiberParser(
              new OapDataFileMetaV1(rowCountInEachGroup = rowCount), dictionary, StringType)
            val parsedBytes = fiberParser.parse(bytes, count)
            val referenceBytes = referenceFiberBuilder.build().fiberData
            referenceFiberBuilder.clear()
            referenceFiberBuilder.resetDictionary()
            fiberBuilder.clear()
            fiberBuilder.resetDictionary()
            assert(parsedBytes.length == referenceBytes.length)
            parsedBytes.zip(referenceBytes).exists(byte => byte._1 != byte._2)
          }
        } else {
          true
        }
    }
  }
}

class DictionaryBasedEncoderSuite extends SparkFunSuite with Checkers {

  test("Check Encoding/Decoding") {
    check(PropertiesAdapter.getProp(new DictionaryBasedEncoderCheck()))
  }
} 
Example 11
Source File: PrettifyTest.scala    From spark-testing-base   with Apache License 2.0
package com.holdenkarau.spark.testing

import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.scalacheck.Gen
import org.scalacheck.Prop._
import org.scalacheck.util.Pretty
import org.scalatest.FunSuite
import org.scalatest.exceptions.GeneratorDrivenPropertyCheckFailedException
import org.scalatest.prop.Checkers

class PrettifyTest extends FunSuite with SharedSparkContext with Checkers with Prettify {
  implicit val propertyCheckConfig = PropertyCheckConfig(minSize = 2, maxSize = 2)

  test("pretty output of DataFrame's check") {
    val schema = StructType(List(StructField("name", StringType), StructField("age", IntegerType)))
    val sqlContext = new SQLContext(sc)
    val nameGenerator = new Column("name", Gen.const("Holden Hanafy"))
    val ageGenerator = new Column("age", Gen.const(20))

    val dataframeGen = DataframeGenerator.arbitraryDataFrameWithCustomFields(sqlContext, schema)(nameGenerator, ageGenerator)

    val actual = runFailingCheck(dataframeGen.arbitrary)
    val expected =
      Some("arg0 = <DataFrame: schema = [name: string, age: int], size = 2, values = ([Holden Hanafy,20], [Holden Hanafy,20])>")
    assert(actual == expected)
  }

  test("pretty output of RDD's check") {
    val rddGen = RDDGenerator.genRDD[(String, Int)](sc) {
      for {
        name <- Gen.const("Holden Hanafy")
        age <- Gen.const(20)
      } yield name -> age
    }

    val actual = runFailingCheck(rddGen)
    val expected =
      Some("""arg0 = <RDD: size = 2, values = ((Holden Hanafy,20), (Holden Hanafy,20))>""")
    assert(actual == expected)
  }

  test("pretty output of Dataset's check") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val datasetGen = DatasetGenerator.genDataset[(String, Int)](sqlContext) {
      for {
        name <- Gen.const("Holden Hanafy")
        age <- Gen.const(20)
      } yield name -> age
    }

    val actual = runFailingCheck(datasetGen)
    val expected =
      Some("""arg0 = <Dataset: schema = [_1: string, _2: int], size = 2, values = ((Holden Hanafy,20), (Holden Hanafy,20))>""")
    assert(actual == expected)
  }

  private def runFailingCheck[T](genUnderTest: Gen[T])(implicit p: T => Pretty) = {
    val property = forAll(genUnderTest)(_ => false)
    val e = intercept[GeneratorDrivenPropertyCheckFailedException] {
      check(property)
    }
    takeSecondToLastLine(e.message)
  }

  private def takeSecondToLastLine(msg: Option[String]) =
    msg.flatMap(_.split("\n").toList.reverse.tail.headOption.map(_.trim))

} 
Example 12
Source File: SampleDatasetGeneratorTest.scala    From spark-testing-base   with Apache License 2.0
package com.holdenkarau.spark.testing

import org.apache.spark.sql.{Dataset, SQLContext}
import org.scalacheck.{Gen, Arbitrary}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class SampleDatasetGeneratorTest extends FunSuite
    with SharedSparkContext with Checkers {

  test("test generating Datasets[String]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val property =
      forAll(
        DatasetGenerator.genDataset[String](sqlContext)(
          Arbitrary.arbitrary[String])) {
        dataset => dataset.map(_.length).count() == dataset.count()
      }

    check(property)
  }

  test("test generating sized Datasets[String]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val property =
      forAll {
        DatasetGenerator.genSizedDataset[(Int, String)](sqlContext) { size =>
          Gen.listOfN(size, Arbitrary.arbitrary[Char]).map(l => (size, l.mkString))
        }
      }{
        dataset =>
          val tuples = dataset.collect()
          val value = dataset.map{ case (_, str) => str.length}
          tuples.forall{ case (size, str) => size == str.length} &&
          value.count() == dataset.count
      }

    check(property)
  }

  test("test generating Datasets[Custom Class]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    val carGen: Gen[Dataset[Car]] =
      DatasetGenerator.genDataset[Car](sqlContext) {
        val generator: Gen[Car] = for {
          name <- Arbitrary.arbitrary[String]
          speed <- Arbitrary.arbitrary[Int]
        } yield Car(name, speed)

        generator
      }

    val property =
      forAll(carGen) {
        dataset => dataset.map(_.speed).count() == dataset.count()
      }

    check(property)
  }
}

case class Car(name: String, speed: Int) 
Example 13
Source File: MLScalaCheckTest.scala    From spark-testing-base   with Apache License 2.0
package com.holdenkarau.spark.testing

import org.apache.spark.ml.linalg.SQLDataTypes.{MatrixType, VectorType}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types.{StructField, StructType}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class MLScalaCheckTest extends FunSuite with SharedSparkContext with Checkers {
  // don't re-use the Spark context between tests
  override implicit def reuseContextIfPossible: Boolean = false

  test("vector generation") {
    val schema = StructType(List(StructField("vector", VectorType)))
    val sqlContext = new SQLContext(sc)
    val dataframeGen = DataframeGenerator.arbitraryDataFrame(sqlContext, schema)

    val property =
      forAll(dataframeGen.arbitrary) {
        dataframe => {
          dataframe.schema === schema && dataframe.count >= 0
        }
      }

    check(property)
  }

  test("matrix generation") {
    val schema = StructType(List(StructField("matrix", MatrixType)))
    val sqlContext = new SQLContext(sc)
    val dataframeGen = DataframeGenerator.arbitraryDataFrame(sqlContext, schema)

    val property =
      forAll(dataframeGen.arbitrary) {
        dataframe => {
          dataframe.schema === schema && dataframe.count >= 0
        }
      }

    check(property)
  }
} 
Example 14
Source File: DatasetGeneratorSizeSpecial.scala    From spark-testing-base   with Apache License 2.0
package com.holdenkarau.spark.testing

import org.apache.spark.sql.{Dataset, SQLContext}
import org.scalacheck.{Gen, Arbitrary}
import org.scalacheck.Prop.forAll
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers

class DatasetGeneratorSizeSpecial extends FunSuite
    with SharedSparkContext with Checkers {

  test("test generating sized Datasets[Custom Class]") {
    val sqlContext = new SQLContext(sc)
    import sqlContext.implicits._

    // In Spark 2.3 List is fine; prior to 2.1, however, the generator returns
    // a concrete subtype which isn't handled well.
    // This works in 1.6.1+, but we only test on 2.0+ because that's easier.
    val carGen: Gen[Dataset[Seq[Car]]] =
      DatasetGenerator.genSizedDataset[Seq[Car]](sqlContext) { size =>
        val slowCarsTopNumber = math.ceil(size * 0.1).toInt
        def carGenerator(speed: Gen[Int]): Gen[Car] = for {
          name <- Arbitrary.arbitrary[String]
          speed <- speed
        } yield Car(name, speed)

        val cars: Gen[List[Car]] = for {
          slowCarsNumber: Int <- Gen.choose(0, slowCarsTopNumber)
          slowCars: List[Car] <- Gen.listOfN(slowCarsNumber, carGenerator(Gen.choose(0, 20)))
          normalSpeedCars: List[Car] <- Gen.listOfN(
            size - slowCarsNumber,
            carGenerator(Gen.choose(21, 150))
          )
        } yield {
          slowCars ++ normalSpeedCars
        }
        cars
      }

    val property =
      forAll(carGen.map(_.flatMap(identity))) {
        dataset =>
          val cars = dataset.collect()
          val dataSetSize  = cars.length
          val slowCars = cars.filter(_.speed < 21)
          slowCars.length <= dataSetSize * 0.1 &&
            cars.map(_.speed).length == dataSetSize
      }

    check(property)
  }
} 
Example 15
Source File: BreezeCheck.scala    From nd4s   with Apache License 2.0
package org.nd4s

import breeze.linalg._
import monocle.Prism
import org.nd4j.linalg.api.ndarray.INDArray
import org.nd4j.linalg.factory.Nd4j
import org.nd4s.Implicits._
import org.scalacheck.{Arbitrary, Gen, Prop}
import org.scalatest.FlatSpec
import org.scalatest.prop.Checkers


class BreezeCheck extends FlatSpec with Checkers {
  it should "work as same as NDArray slicing" in {
    check {
      Prop.forAll {
        (ndArray: INDArray) =>
          ndArray.setOrder('f')
          val shape = ndArray.shape()
          val Array(row, col) = shape
          Prop.forAll(Gen.choose(0, row - 1), Gen.choose(0, row - 1), Gen.choose(0, col - 1), Gen.choose(0, col - 1)) {
            (r1, r2, c1, c2) =>
              val rowRange = if (r1 > r2) r2 to r1 else r1 to r2
              val columnRange = if (c1 > c2) c2 to c1 else c1 to c2
              val slicedByND4S = ndArray(rowRange, columnRange)
              val slicedByBreeze = prism.getOption(ndArray).map(dm => prism.reverseGet(dm(rowRange, columnRange)))
              slicedByBreeze.exists(_ == slicedByND4S)
          }
      }
    }
  }

  // This supports only real values, since ND4J has temporarily dropped complex number support.
  lazy val prism = Prism[INDArray, DenseMatrix[Double]] { ndArray =>
    // Breeze's DenseMatrix supports neither tensors nor C-ordered matrices.
    if (ndArray.rank() > 2 || ndArray.ordering() == 'c')
      None
    else {
      val shape = ndArray.shape()
      val linear = ndArray.linearView()
      val arr = (0 until ndArray.length()).map(linear.getDouble).toArray
      Some(DenseMatrix(arr).reshape(shape(0), shape(1)))
    }
  } {
    dm =>
      val shape = Array(dm.rows, dm.cols)
      dm.toArray.mkNDArray(shape, NDOrdering.Fortran)
  }

  implicit def arbNDArray: Arbitrary[INDArray] = Arbitrary {
    for {
      rows <- Gen.choose(1, 100)
      columns <- Gen.choose(1, 100)
    } yield {
      val nd = Nd4j.rand(rows, columns)
      nd.setOrder('f')
      nd
    }
  }

}