org.openjdk.jmh.annotations.State Scala Examples

The following examples show how to use org.openjdk.jmh.annotations.State. They are drawn from open-source projects; you can go to the original project or source file by following the links above each example.
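As background for the examples below: a @State-annotated class tells JMH how to share benchmark fixtures. JMH instantiates the class once per scope (Scope.Benchmark shares a single instance across all benchmark threads, Scope.Thread gives every thread its own copy, Scope.Group shares one instance per thread group) and injects it into @Benchmark methods, so setup cost stays outside the measured region. Here is a minimal, hypothetical sketch of the pattern — the names are invented and not taken from any of the projects below:

import org.openjdk.jmh.annotations.{Benchmark, Scope, Setup, State}

// Each benchmark thread gets its own instance of this state class.
@State(Scope.Thread)
class CounterState {
  var counter: Long = _

  @Setup
  def setup(): Unit =
    counter = 0L // runs before measurement, so initialization is not timed
}

class CounterStateBenchmark {
  // JMH injects the state instance as a method parameter.
  @Benchmark
  def increment(state: CounterState): Long = {
    state.counter += 1
    state.counter
  }
}

As the examples show, a benchmark class may also annotate itself with @State instead of using a separate state class; both forms are accepted.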
Example 1
Source File: StreamInputOutputBenchmark.scala From scala-commons with MIT License
package com.avsystem.commons
package ser

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, DataInputStream, DataOutputStream}

import com.avsystem.commons.serialization.{GenCodec, StreamInput, StreamOutput}
import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup}
import org.openjdk.jmh.infra.Blackhole


case class Toplevel(int: Int, nested: Nested, str: String)
case class Nested(list: List[Int], int: Int)

object Toplevel {
  implicit val nestedCodec: GenCodec[Nested] = GenCodec.materialize[Nested]
  implicit val codec: GenCodec[Toplevel] = GenCodec.materialize[Toplevel]
}

@Warmup(iterations = 10)
@Measurement(iterations = 20)
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
@State(Scope.Thread)
class StreamInputOutputBenchmark {

  val something = Toplevel(35, Nested(List(121, 122, 123, 124, 125, 126), 53), "lol")

  val inputArray: Array[Byte] = {
    val os = new ByteArrayOutputStream()

    GenCodec.write(new StreamOutput(new DataOutputStream(os)), something)
    os.toByteArray
  }

  @Benchmark
  def testEncode(bh: Blackhole): Unit = {
    val os = new ByteArrayOutputStream(inputArray.length)
    val output = new StreamOutput(new DataOutputStream(os))
    GenCodec.write(output, something)
    bh.consume(os.toByteArray)
  }

  @Benchmark
  def testDecode(bh: Blackhole): Unit = {
    val is = new DataInputStream(new ByteArrayInputStream(inputArray))
    val input = new StreamInput(is)
    bh.consume(GenCodec.read[Toplevel](input))
  }

  @Benchmark
  def testEncodeRaw(bh: Blackhole): Unit = {
    val os = new ByteArrayOutputStream(inputArray.length)
    val output = new StreamOutput(new DataOutputStream(os))
    val toplevelOutput = output.writeObject()
    toplevelOutput.writeField("int").writeSimple().writeInt(35)
    val nestedOutput = toplevelOutput.writeField("nested").writeObject()
    val listOutput = nestedOutput.writeField("list").writeList()
    listOutput.writeElement().writeSimple().writeInt(121)
    listOutput.writeElement().writeSimple().writeInt(122)
    listOutput.writeElement().writeSimple().writeInt(123)
    listOutput.writeElement().writeSimple().writeInt(124)
    listOutput.writeElement().writeSimple().writeInt(125)
    listOutput.writeElement().writeSimple().writeInt(126)
    listOutput.finish()
    nestedOutput.writeField("int").writeSimple().writeInt(53)
    nestedOutput.finish()
    toplevelOutput.writeField("str").writeSimple().writeString("lol")
    toplevelOutput.finish()
    bh.consume(os.toByteArray)
  }

  @Benchmark
  def testDecodeRaw(bh: Blackhole): Unit = {
    val is = new DataInputStream(new ByteArrayInputStream(inputArray))
    val input = new StreamInput(is)
    val objInput = input.readObject()
    val intField = objInput.nextField().readSimple().readInt()
    val nestedInput = objInput.nextField().readObject()
    val listInput = nestedInput.nextField().readList()
    val listNested = List(
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt(),
      listInput.nextElement().readSimple().readInt()
    )
    listInput.hasNext
    val intNested = nestedInput.nextField().readSimple().readInt()
    nestedInput.hasNext
    val strField = objInput.nextField().readSimple().readString()
    objInput.hasNext
    bh.consume(Toplevel(intField, Nested(listNested, intNested), strField))
  }
} 
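A note on the Blackhole parameter used above (and in most examples below): passing each result to Blackhole.consume prevents the JIT compiler from eliminating the benchmarked computation as dead code. Returning the value from the @Benchmark method has the same effect, since JMH consumes return values implicitly; a minimal sketch with hypothetical names:

import org.openjdk.jmh.annotations.Benchmark

class ReturnValueBenchmark {
  // JMH implicitly consumes the returned value, which is
  // equivalent to passing it to Blackhole.consume by hand.
  @Benchmark
  def sumWithoutBlackhole: Long = (1L to 1000L).sum
}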
Example 2
Source File: RESPFrameBench.scala From laserdisc with MIT License
package laserdisc
package protocol

import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import scodec.bits.BitVector
import eu.timepit.refined.types.string.NonEmptyString
import java.nio.ByteBuffer

import laserdisc.RESPFrameFixture
import org.openjdk.jmh.infra.Blackhole

@State(Scope.Benchmark)
class RESPFrameBench extends RESPFrameFixture {

  val mixedNoArr = bytesOf(mixedNoArrList)
  val arrOneLevel = bytesOf(arrOneLevelList)
  val arrFiveLevels = bytesOf(arrFiveLevelsList)

  val empty = BitVector.empty.toByteBuffer
  val mixedNoArrFull = BitVector(mixedNoArr).toByteBuffer
  val arrOneLevelFull = BitVector(arrOneLevel).toByteBuffer
  val arrFiveLevelsFull = BitVector(arrFiveLevels).toByteBuffer

  @Benchmark def frameOfFullBaseline(bh: Blackhole) = {
    val frame = EmptyFrame.append(empty)
    bh.consume(frame)
  }
  @Benchmark def frameOfMixedNoArrFull(bh: Blackhole) = {
    val frame = EmptyFrame.append(mixedNoArrFull)
    bh.consume(frame)
  }
  @Benchmark def frameOfMixedArrOneLevelFull(bh: Blackhole) = {
    val frame = EmptyFrame.append(arrOneLevelFull)
    bh.consume(frame)
  }
  @Benchmark def frameOfMixedArrFiveLevelsFull(bh: Blackhole) = {
    val frame = EmptyFrame.append(arrFiveLevelsFull)
    bh.consume(frame)
  }

  val mixedNoArrSmallChunkBuffers    = groupInChunks(mixedNoArr, 128)
  val arrOneLevelSmallChunkBuffers   = groupInChunks(arrOneLevel, 128)
  val arrFiveLevelsSmallChunkBuffers = groupInChunks(arrFiveLevels, 128)

  @Benchmark def frameOfChunkedBaseline(bh: Blackhole) = {
    val frames = appendChunks(Iterator.empty[BitVector])
    bh.consume(frames)
  }
  @Benchmark def frameOfChunkedShortMixedNoArr(bh: Blackhole) = {
    val frames = appendChunks(mixedNoArrSmallChunkBuffers)
    bh.consume(frames)
  }
  @Benchmark def frameOfChunkedShortArrOneLevel(bh: Blackhole)   = {
    val frames = appendChunks(arrOneLevelSmallChunkBuffers)
    bh.consume(frames)
  }
  @Benchmark def frameOfChunkedShortArrFiveLevels(bh: Blackhole) = {
    val frames = appendChunks(arrFiveLevelsSmallChunkBuffers)
    bh.consume(frames)
  }

  val mixedNoArrBigChunkBuffers    = groupInChunks(mixedNoArr, 1024)
  val arrOneLevelBigChunkBuffers   = groupInChunks(arrOneLevel, 1024)
  val arrFiveLevelsBigChunkBuffers = groupInChunks(arrFiveLevels, 1024)

  @Benchmark def frameOfChunkedLongMixedNoArr(bh: Blackhole)    = {
    val frames = appendChunks(mixedNoArrBigChunkBuffers)
    bh.consume(frames)
  }
  @Benchmark def frameOfChunkedLongArrOneLevel(bh: Blackhole)   = {
    val frames = appendChunks(arrOneLevelBigChunkBuffers)
    bh.consume(frames)
  }
  @Benchmark def frameOfChunkedLongArrFiveLevels(bh: Blackhole) = {
    val frames = appendChunks(arrFiveLevelsBigChunkBuffers)
    bh.consume(frames)
  }
} 
Example 3
Source File: ProtocolBenchArr.scala From laserdisc with MIT License
package laserdisc
package protocol

import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import shapeless._

@State(Scope.Benchmark)
class ProtocolBenchArr {
  final case class A(s: List[String])
  implicit val ev1: Arr ==> A =
    Read.infallible { arr =>
      A(arr.elements.map {
        case Bulk(s) => s
        case _ => ""
      })
    }

  private final def protocol = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[Long]]
  private final def protocolArr = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[A]]
  private final def protocolWithNull = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[Option[Long]]]
  private final def protocolArrWithNull = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[Option[A]]]
  private final def protocolPairs = Protocol("CUSTOM", _: String :: HNil).as[Arr, Seq[(String, Long)]]

  private final val request  = "id" :: HNil

  private final val response1 = Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"))
  private final val response2 = Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"))
  private final val response3 = Arr(
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"))
  )
  private final val response4 = Arr(
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("2"), Bulk("1"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), NullBulk, Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), NullBulk, Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), NullBulk, Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), NullBulk, Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20"), Bulk("1"), Bulk("2"), Bulk("3"), Bulk("4"), Bulk("5"), Bulk("6"), Bulk("7"), Bulk("8"), Bulk("9"), Bulk("10"), Bulk("11"), Bulk("12"), Bulk("13"), Bulk("14"), Bulk("15"), Bulk("16"), Bulk("17"), Bulk("18"), Bulk("19"), Bulk("20")),
    Arr(Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20"), Bulk("1"), NullBulk, Bulk("3"), NullBulk, Bulk("5"), Bulk("6"), NullBulk, NullBulk, Bulk("9"), NullBulk, Bulk("11"), Bulk("12"), NullBulk, Bulk("14"), NullBulk, Bulk("16"), Bulk("17"), Bulk("18"), NullBulk, Bulk("20")),
    Arr(Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk, Bulk("1"), Bulk("2"), Bulk("3"), NullBulk, Bulk("5"), NullBulk, Bulk("7"), NullBulk, Bulk("9"), NullBulk, NullBulk, NullBulk, NullBulk, NullBulk, Bulk("15"), Bulk("16"), NullBulk, Bulk("18"), Bulk("19"), NullBulk)
  )
  private final val response5 = Arr(Bulk("abcd-1"), Bulk("1"), Bulk("abcd-2"), Bulk("2"), Bulk("abcd-3"), Bulk("3"), Bulk("abcd-4"), Bulk("4"), Bulk("abcd-5"), Bulk("5"), Bulk("abcd-6"), Bulk("6"), Bulk("abcd-7"), Bulk("7"), Bulk("abcd-8"), Bulk("8"), Bulk("abcd-9"), Bulk("9"), Bulk("abcd-10"), Bulk("10"), Bulk("abcd-11"), Bulk("11"), Bulk("abcd-12"), Bulk("12"), Bulk("abcd-13"), Bulk("13"), Bulk("abcd-14"), Bulk("14"), Bulk("abcd-15"), Bulk("15"), Bulk("abcd-16"), Bulk("16"), Bulk("abcd-17"), Bulk("17"), Bulk("abcd-18"), Bulk("18"), Bulk("abcd-19"), Bulk("19"), Bulk("abcd-20"), Bulk("20"), Bulk("abcd-1"), Bulk("1"), Bulk("abcd-2"), Bulk("2"), Bulk("abcd-3"), Bulk("3"), Bulk("abcd-4"), Bulk("4"), Bulk("abcd-5"), Bulk("5"), Bulk("abcd-6"), Bulk("6"), Bulk("abcd-7"), Bulk("7"), Bulk("abcd-8"), Bulk("8"), Bulk("abcd-9"), Bulk("9"), Bulk("abcd-10"), Bulk("10"), Bulk("abcd-11"), Bulk("11"), Bulk("abcd-12"), Bulk("12"), Bulk("abcd-13"), Bulk("13"), Bulk("abcd-14"), Bulk("14"), Bulk("abcd-15"), Bulk("15"), Bulk("abcd-16"), Bulk("16"), Bulk("abcd-17"), Bulk("17"), Bulk("abcd-18"), Bulk("18"), Bulk("abcd-19"), Bulk("19"), Bulk("abcd-20"), Bulk("20"))

  @Benchmark def decodeArrBaseline(bh: Blackhole) = {
    val decoded = protocol(request).decode(Arr(Nil))
    bh.consume(decoded)
  }

  @Benchmark def decodeArrOfBulk(bh: Blackhole) = {
    val decoded = protocol(request).decode(response1)
    bh.consume(decoded)
  }
  @Benchmark def decodeArrOfBulkWithNull(bh: Blackhole) = {
    val decoded = protocolWithNull(request).decode(response2)
    bh.consume(decoded)
  }
  @Benchmark def decodeArrOfArrOfBulk(bh: Blackhole) = {
    val decoded = protocol(request).decode(response3)
    bh.consume(decoded)
  }
  @Benchmark def decodeArrOfArrOfBulkWithNull(bh: Blackhole) = {
    val decoded = protocolWithNull(request).decode(response4)
    bh.consume(decoded)
  }
  @Benchmark def decodeArrOfPairs(bh: Blackhole) = {
    val decoded = protocolPairs(request).decode(response5)
    bh.consume(decoded)
  }
} 
Example 4
Source File: RESPBench.scala From laserdisc with MIT License
package laserdisc
package protocol

import java.nio.charset.StandardCharsets.UTF_8

import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import scodec.bits.BitVector
import scodec.codecs.utf8
import scodec.{Attempt, Codec}

@State(Scope.Benchmark)
class RESPBench {

  private final val codec = Codec[RESP]

  private final val chars = 2000

  private final val ok              = "OK"
  private final val okRedis         = s"+$ok$CRLF"
  private final val rtProblem       = "runtime problem"
  private final val rtProblemRedis  = s"-$rtProblem$CRLF"
  private final val fortyTwo        = 42L
  private final val fortyTwoRedis   = s":$fortyTwo$CRLF"
  private final val longString      = new String(Array.fill(chars)('a'))
  private final val longStringRedis = s"$$$chars$CRLF$longString$CRLF"
  private final val longStringI =
    "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"

  private final val str             = Str(ok)
  private final val strBits         = BitVector(okRedis.getBytes(UTF_8))
  private final val err             = Err(rtProblem)
  private final val errBits         = BitVector(rtProblemRedis.getBytes(UTF_8))
  private final val num             = Num(fortyTwo)
  private final val numBits         = BitVector(fortyTwoRedis.getBytes(UTF_8))
  private final val bulk            = Bulk(longString)
  private final val bulkBits        = BitVector(longStringRedis.getBytes(UTF_8))
  private final val longStringBits  = BitVector(longString.getBytes(UTF_8))
  private final val longStringIBits = BitVector(longStringI.getBytes(UTF_8))

  @Benchmark def baseline_utf8_encode: Attempt[BitVector]  = utf8.encode(longString)
  @Benchmark def baseline_utf8_decode: Attempt[String]     = utf8.decodeValue(longStringBits)
  @Benchmark def baseline_utf8_encodeI: Attempt[BitVector] = utf8.encode(longStringI)
  @Benchmark def baseline_utf8_decodeI: Attempt[String]    = utf8.decodeValue(longStringIBits)
  @Benchmark def str_encode: Attempt[BitVector]            = codec.encode(str)
  @Benchmark def str_decode: Attempt[RESP]                 = codec.decodeValue(strBits)
  @Benchmark def err_encode: Attempt[BitVector]            = codec.encode(err)
  @Benchmark def err_decode: Attempt[RESP]                 = codec.decodeValue(errBits)
  @Benchmark def num_encode: Attempt[BitVector]            = codec.encode(num)
  @Benchmark def num_decode: Attempt[RESP]                 = codec.decodeValue(numBits)
  @Benchmark def bulk_encode: Attempt[BitVector]           = codec.encode(bulk)
  @Benchmark def bulk_decode: Attempt[RESP]                = codec.decodeValue(bulkBits)
} 
Example 5
Source File: BigSum.scala From Waves with MIT License
package com.wavesplatform.lang.v1

import com.wavesplatform.lang.v1.compiler.Terms.{CONST_LONG, EXPR, FUNCTION_CALL}
import com.wavesplatform.lang.v1.evaluator.ctx.impl.PureContext
import org.openjdk.jmh.annotations.{Scope, State}

@State(Scope.Benchmark)
class BigSum {
  private val bigSum = (1 to 100).foldLeft[EXPR](CONST_LONG(0)) { (r, i) =>
    FUNCTION_CALL(
      function = PureContext.sumLong,
      args = List(r, CONST_LONG(i))
    )
  }

  val expr: EXPR = FUNCTION_CALL(
    function = PureContext.eq,
    args = List(CONST_LONG(1), bigSum)
  )
} 
Example 6
Source File: DBState.scala From Waves with MIT License
package com.wavesplatform.state

import java.io.File

import com.wavesplatform.Application
import com.wavesplatform.account.AddressScheme
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.database.{LevelDBWriter, openDB}
import com.wavesplatform.lang.directives.DirectiveSet
import com.wavesplatform.settings.WavesSettings
import com.wavesplatform.transaction.smart.WavesEnvironment
import com.wavesplatform.utils.ScorexLogging
import monix.eval.Coeval
import org.iq80.leveldb.DB
import org.openjdk.jmh.annotations.{Param, Scope, State, TearDown}

@State(Scope.Benchmark)
abstract class DBState extends ScorexLogging {
  @Param(Array("waves.conf"))
  var configFile = ""

  lazy val settings: WavesSettings = Application.loadApplicationConfig(Some(new File(configFile)).filter(_.exists()))

  lazy val db: DB = openDB(settings.dbSettings.directory)

  lazy val levelDBWriter: LevelDBWriter =
    LevelDBWriter.readOnly(
      db,
      settings.copy(dbSettings = settings.dbSettings.copy(maxCacheSize = 1))
    )

  AddressScheme.current = new AddressScheme { override val chainId: Byte = 'W' }

  lazy val environment = new WavesEnvironment(
    AddressScheme.current.chainId,
    Coeval.raiseError(new NotImplementedError("`tx` is not implemented")),
    Coeval(levelDBWriter.height),
    levelDBWriter,
    null,
    DirectiveSet.contractDirectiveSet,
    ByteStr.empty
  )

  @TearDown
  def close(): Unit = {
    db.close()
  }
} 
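Example 6 above uses @TearDown to release resources (the LevelDB handle) once measurement finishes. Both @Setup and @TearDown accept an optional Level argument controlling how often they run; a minimal, hypothetical sketch:

import org.openjdk.jmh.annotations.{Level, Scope, Setup, State, TearDown}

@State(Scope.Benchmark)
class LifecycleState {
  @Setup(Level.Trial)        // once per fork, before any iteration runs
  def openResources(): Unit = ()

  @TearDown(Level.Iteration) // after every measurement iteration
  def flush(): Unit = ()
}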
Example 7
Source File: FieldLookupBenchmarks.scala From scalaz-deriving with GNU Lesser General Public License v3.0
// Copyright: 2017 - 2020 Sam Halliday
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html

package jsonformat.benchmarks

import jsonformat._
import jsonformat.BenchmarkUtils.getResourceAsString
import org.openjdk.jmh.annotations.{ Benchmark, Scope, Setup, State }
import scalaz._, Scalaz._

import JsDecoder.ops._

// jsonformat/jmh:run -i 5 -wi 5 -f1 -t2 -w1 -r1 FieldLookupBenchmarks.*

final case class FieldLookup(
  id: Long
)

// when benching magnolia, we've seen some strange perf patterns when looking
// up fields in the twitter model. My suspicion is that large case classes or
// backtick fields have a perf regression.
@State(Scope.Benchmark)
class FieldLookupBenchmarks {

  var user: s.User          = _
  var baseline: FieldLookup = _

  @Setup
  def setup(): Unit = {
    val raw = getResourceAsString("twitter_api_response.json")
    val ast = JsParser(raw).getOrElse(null)
    user = ast.as[List[s.Tweet]].getOrElse(null).head.user
    baseline = FieldLookup(0)
  }

  @Benchmark
  def fieldAccessBaseline(): Long = baseline.id

  @Benchmark
  def elementAccessBaseline(): Long =
    baseline.productElement(0).asInstanceOf[Long]

  @Benchmark
  def fieldAccessPlain(): Long = user.id

  @Benchmark
  def elementAccessPlain(): Long = user.productElement(0).asInstanceOf[Long]

  @Benchmark
  def fieldAccessBacktick(): Boolean = user.`protected`

  @Benchmark
  def elementAccessBacktick(): Boolean =
    user.productElement(8).asInstanceOf[Boolean]

  @Benchmark
  def fieldAccessXl(): String = user.translator_type

  @Benchmark
  def elementAccessXl(): String = user.productElement(41).asInstanceOf[String]

  @Benchmark
  def indirectionAccessValXl(): String    = translator_type_val(user)
  @Benchmark
  def indirectionAccessDefXl(): String    = translator_type_def(user)
  @Benchmark
  def indirectionAccessDefValXl(): String =
    translator_type_abstract(user, translator_type_val)
  @Benchmark
  def indirectionAccessDefDefXl(): String =
    translator_type_abstract(user, translator_type_def)

  val translator_type_val                                              = (u: s.User) => u.translator_type
  def translator_type_def(u: s.User)                                   = u.translator_type
  def translator_type_abstract(u: s.User, f: s.User => String): String = f(u)

} 
Example 8
Source File: StringyMapBenchmarks.scala From scalaz-deriving with GNU Lesser General Public License v3.0
// Copyright: 2017 - 2020 Sam Halliday
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html

package jsonformat.benchmarks

import jsonformat._
import internal._
import org.openjdk.jmh.annotations.{ Benchmark, Param, Scope, Setup, State }
import scalaz._, Scalaz._

// jsonformat/jmh:run -i 5 -wi 5 -f1 -t2 -w1 -r1 StringyMapBenchmarks.*

@State(Scope.Benchmark)
class StringyMapBenchmarks {
  @Param(Array("2", "4", "8", "16", "32", "64", "128", "256", "512", "1024"))
  var size: Int = 0

  var data: IList[(String, String)]     = _
  var queries: IList[String]            = _
  var stringy_ilist: StringyMap[String] = _
  var stringy_java: StringyMap[String]  = _

  @Setup
  def setup(): Unit = {
    data = (0 |-> size).map(i => (i.toString, i.toString)).toIList
    // built here rather than in a field initializer so that the injected
    // @Param size is visible (initializers run before params are set)
    queries = IList((size / 2).toString, "<not here>")
    stringy_ilist = createIList()
    stringy_java = createJava()
  }

  //@Benchmark
  def createIList(): StringyMap[String] = StringyIList(data)
  @Benchmark
  def createJava(): StringyMap[String]  = StringyJavaMap(data, 16)

  @Benchmark
  def lookupIList(): IList[Maybe[String]] = queries.map(stringy_ilist.get)
  @Benchmark
  def lookupJava(): IList[Maybe[String]]  = queries.map(stringy_java.get)

}

// jsonformat/jmh:run -i 1 -wi 1 -f1 -t2 -w1 -r1 StringyMapMoarBenchmarks.*
//
// jsonformat/jmh:run -i 1 -wi 1 -f1 -t2 -w1 -r1 StringyMapMoarBenchmarks.* -p numEntries=64 -p numQueries=64
@State(Scope.Benchmark)
class StringyMapMoarBenchmarks {
  @Param(Array("1", "2", "4", "8", "16"))
  var numEntries: Int = 0

  @Param(Array("1", "2", "3", "4", "5", "6"))
  var numQueries: Int = 0

  var data: IList[(String, String)] = _
  var queries: IList[String]        = _

  @Setup
  def setup(): Unit = {
    data = (0 |-> numEntries).map(i => (i.toString, i.toString)).toIList
    queries = (0 |-> numQueries).reverse.map(i => i.toString).toIList
  }

  @Benchmark
  def aggregateIList(): IList[Maybe[String]] = {
    val lookup = StringyIList(data)
    queries.map(lookup.get)
  }
  @Benchmark
  def aggregateJava(): IList[Maybe[String]] = {
    val lookup = StringyJavaMap(data, numQueries)
    queries.map(lookup.get)
  }

} 
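One timing detail that benchmarks like StringyMapBenchmarks depend on: JMH injects @Param values after constructing the state object but before calling @Setup, so anything derived from a param must be computed inside @Setup rather than in a field initializer (an initializer would still see the field's default value). A minimal, hypothetical sketch of the safe pattern:

import org.openjdk.jmh.annotations.{Param, Scope, Setup, State}

@State(Scope.Benchmark)
class ParamState {
  @Param(Array("10", "100"))
  var size: Int = _

  var data: Array[Int] = _

  @Setup
  def setup(): Unit =
    // safe: the injected param value is visible here
    data = Array.range(0, size)
}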
Example 9
Source File: MurmurHash3Benchmark.scala From bloom-filter-scala with MIT License
package bloomfilter.hashing

import java.nio.ByteBuffer

import sandbox.hashing.{YonikMurmurHash3, CassandraMurmurHash, AlgebirdMurmurHash128}
import sandbox.hashing.YonikMurmurHash3.LongPair
import com.yahoo.sketches.hash.{MurmurHash3 => yMurmurHash3}
import com.google.common.hash.Hashing
import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import scala.util.hashing.{MurmurHash3 => ScalaMurmurHash3}
import com.clearspring.analytics.hash.{MurmurHash => StreamLibMurmurHash}

@State(Scope.Benchmark)
class MurmurHash3Benchmark {

  val key = Range(0, 64).map(_.toByte).toArray

  @Benchmark
  def javaVersion() = {
    YonikMurmurHash3.murmurhash3_x64_128(key, 0, key.length, 0, new LongPair)
  }

  @Benchmark
  def scalaVersion() = {
    MurmurHash3Generic.murmurhash3_x64_128(key, 0, key.length, 0)
  }

  val guavaMurmur = Hashing.murmur3_128()

  @Benchmark
  def guavaVersion() = {
    guavaMurmur.hashBytes(key, 0, key.length)
  }

  @Benchmark
  def cassandraVersion() = {
    CassandraMurmurHash.hash3_x64_128(ByteBuffer.wrap(key), 0, key.length, 0)
  }

  val algebirdMurmur = AlgebirdMurmurHash128(0)

  @Benchmark
  def algebirdVersion() = {
    algebirdMurmur.apply(key)
  }

  @Benchmark
  def yahooVersion() = {
    yMurmurHash3.hash(key, 0)
  }

  @Benchmark
  def scalaStdlibVersion() = {
    ScalaMurmurHash3.arrayHash(key, 0)
  }

  @Benchmark
  def streamLibVersion() = {
    StreamLibMurmurHash.hash(key)
  }
} 
Example 10
Source File: MurmurHash3GenericBenchmark.scala From bloom-filter-scala with MIT License
package bloomfilter.hashing

import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import sandbox.hashing.MurmurHash3

@State(Scope.Benchmark)
class MurmurHash3GenericBenchmark {

  val key = Range(0, 64).map(_.toByte).toArray

  @Benchmark
  def scalaVersion() = {
    MurmurHash3.murmurhash3_x64_128(key, 0, key.length, 0)
  }

  @Benchmark
  def genericVersion() = {
    MurmurHash3Generic.murmurhash3_x64_128(key, 0, key.length, 0)
  }
} 
Example 11
Source File: UnsafeBitArrayBenchmark.scala From bloom-filter-scala with MIT License
package bloomfilter

import java.util.BitSet

import bloomfilter.mutable.UnsafeBitArray
import org.openjdk.jmh.annotations.{Benchmark, Scope, State}
import sandbox.bloomfilter.mutable.ChronicleBitArray

@State(Scope.Benchmark)
class UnsafeBitArrayBenchmark {

  private val numberOfBits = Int.MaxValue

  val unsafeBits = new UnsafeBitArray(numberOfBits.toLong)
  val bitsSet = new BitSet(numberOfBits)
  val chronicle = new ChronicleBitArray(numberOfBits.toLong)

  @Benchmark
  def getUnsafe() = {
    unsafeBits.get(1)
    unsafeBits.get(10)
    unsafeBits.get(100)
    unsafeBits.get(1000)
    unsafeBits.get(10000)
    unsafeBits.get(100000)
    unsafeBits.get(1000000)
  }

  @Benchmark
  def getBitSet() = {
    bitsSet.get(1)
    bitsSet.get(10)
    bitsSet.get(100)
    bitsSet.get(1000)
    bitsSet.get(10000)
    bitsSet.get(100000)
    bitsSet.get(1000000)
  }

  @Benchmark
  def getChronicle() = {
    chronicle.get(1)
    chronicle.get(10)
    chronicle.get(100)
    chronicle.get(1000)
    chronicle.get(10000)
    chronicle.get(100000)
    chronicle.get(1000000)
  }


} 
Example 12
Source File: CornichonJsonBench.scala From cornichon with Apache License 2.0
package parsers

import cats.instances.string._
import com.github.agourlay.cornichon.json.CornichonJson
import org.openjdk.jmh.annotations.{ Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup }

@State(Scope.Benchmark)
@BenchmarkMode(Array(Mode.Throughput))
@Warmup(iterations = 10)
@Measurement(iterations = 10)
@Fork(value = 1, jvmArgsAppend = Array(
  "-XX:+FlightRecorder",
  "-XX:StartFlightRecording=filename=./CornichonJSonBench-profiling-data.jfr,name=profile,settings=profile",
  "-Xmx1G"))
class CornichonJsonBench {

  @Benchmark
  def parseDslStringJsonString() = {
    val res = CornichonJson.parseDslJson("cornichon")
    assert(res.isRight)
  }

  @Benchmark
  def parseDslStringJsonArray() = {
    val res = CornichonJson.parseDslJson("""[ "cornichon", "cornichon" ] """)
    assert(res.isRight)
  }

  @Benchmark
  def parseDslStringJsonTable() = {
    val res = CornichonJson.parseDslJson("""
      Name   |   Age  |
      "John" |   30   |
      "Bob"  |   41   |
    """)
    assert(res.isRight)
  }

} 
Example 13
Source File: RandomUtilBenchmarks.scala From aerosolve with Apache License 2.0
package com.airbnb.common.ml.util

import scala.collection.mutable

import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.Fork
import org.openjdk.jmh.annotations.Measurement
import org.openjdk.jmh.annotations.Mode
import org.openjdk.jmh.annotations.Param
import org.openjdk.jmh.annotations.Scope
import org.openjdk.jmh.annotations.State
import org.openjdk.jmh.annotations.Warmup


object RandomUtilBenchmarks {

  @BenchmarkMode(Array(Mode.Throughput))
  @Fork(value = 1, warmups = 0)
  @Warmup(iterations = 4, time = 2)
  @Measurement(iterations = 8, time = 2)
  @State(value = Scope.Benchmark)
  class SampleBenchmarks {

    // Try different collection sizes
    @Param(value = Array("1000", "10000"))
    var numItemsToGenerate: Int = _

    val ratios: Seq[Double] = Seq(0.85, 0.1, 0.05)


    @Benchmark
    def sampleArray(): Seq[Seq[Int]] = {
      RandomUtil.sample(
        Array.range(1, numItemsToGenerate),
        ratios
      )
    }

    @Benchmark
    def sampleArraySeq(): Seq[Seq[Int]] = {
      RandomUtil.sample(
        mutable.ArraySeq.range(1, numItemsToGenerate),
        ratios
      )
    }

    @Benchmark
    def sampleList(): Seq[Seq[Int]] = {
      RandomUtil.sample(
        List.range(1, numItemsToGenerate),
        ratios
      )
    }

    @Benchmark
    def sampleSeq(): Seq[Seq[Int]] = {
      RandomUtil.sample(
        Seq.range(1, numItemsToGenerate),
        ratios
      )
    }

    @Benchmark
    def sampleVector(): Seq[Seq[Int]] = {
      RandomUtil.sample(
        Vector.range(1, numItemsToGenerate),
        ratios
      )
    }

    @Benchmark
    def sampleSeqToVector(): Seq[Seq[Int]] = {
      RandomUtil.sample(
        Seq.range(1, numItemsToGenerate).toVector,
        ratios
      )
    }
  }
} 
Example 14
Source File: TestState.scala From coursier with Apache License 2.0
package coursier.benchmark

import coursier.cache.Cache
import coursier.core.{Configuration, ResolutionProcess}
import coursier.{Repositories, Resolve, dependencyString, moduleString}
import coursier.internal.InMemoryCachingFetcher
import coursier.maven.{MavenRepository, Pom}
import coursier.util.Sync
import org.openjdk.jmh.annotations.{Scope, State}

import scala.concurrent.{Await, ExecutionContext}
import scala.concurrent.duration.Duration

@State(Scope.Benchmark)
class TestState {

  val initialSparkSqlRes = Resolve.initialResolution(
    Seq(dep"org.apache.spark:spark-sql_2.12:2.4.0")
  )

  val initialCoursierCliRes = Resolve.initialResolution(
    Seq(dep"io.get-coursier:coursier-cli_2.12:1.1.0-M10")
  )

  val repositories = Seq(
    Repositories.central
  )

  val repositoriesDom = {
    val l = Seq(
      MavenRepository("https://repo1.maven.org/maven2")
    )
    for (r <- l)
      r.useSaxParser = false
    l
  }

  val pool = Sync.fixedThreadPool(6)
  val ec = ExecutionContext.fromExecutorService(pool)

  val inMemoryCache = {
    val c = new InMemoryCachingFetcher(Cache.default.fetch)
    val fetch = ResolutionProcess.fetch(repositories, c.fetcher)

    for (initialRes <- Seq(initialSparkSqlRes, initialCoursierCliRes)) {
      val t = Resolve.runProcess(initialRes, fetch)
      Await.result(t.future()(ec), Duration.Inf)
    }

    c.onlyCache()
    c
  }

  val fetcher = inMemoryCache.fetcher

  val fetch = ResolutionProcess.fetch(repositories, fetcher)
  val fetchDom = ResolutionProcess.fetch(repositoriesDom, fetcher)

  val forProjectCache = {

    val modules = Seq(
      mod"org.apache:apache" -> "18",
      mod"org.apache.spark:spark-parent_2.12" -> "2.4.0",
      mod"org.apache.spark:spark-sql_2.12" -> "2.4.0"
    )

    modules.map {
      case (m, v) =>
        val org = m.organization.value
        val name = m.name.value
        val url = s"https://repo1.maven.org/maven2/${org.replace('.', '/')}/$name/$v/$name-$v.pom"
        val str = inMemoryCache.fromCache(url)
        val p = MavenRepository.parseRawPomDom(str).toOption.get
        val p0 = Pom.addOptionalDependenciesInConfig(
          p.withActualVersionOpt(Some(v))
            .withConfigurations(MavenRepository.defaultConfigurations),
          Set(Configuration.empty, Configuration.default),
          Configuration.optional
        )
        (m, v, p0)
    }
  }

} 
Example 15
Source File: SnapshotAssemblerBench.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.io.File
import java.util.concurrent.TimeUnit

import benches.SnapshotAssemblerBench.SnapshotAssemblerBenchState
import encry.view.state.avlTree.utils.implicits.Instances._
import benches.StateBenches.{StateBenchState, benchSettings}
import benches.Utils.{getRandomTempDir, utxoFromBoxHolder}
import encry.settings.Settings
import encry.storage.{RootNodesStorage, VersionalStorage}
import encry.storage.VersionalStorage.{StorageKey, StorageValue, StorageVersion}
import encry.storage.levelDb.versionalLevelDB.{LevelDbFactory, VLDBWrapper, VersionalLevelDBCompanion}
import encry.utils.FileHelper
import encry.view.fast.sync.SnapshotHolder
import encry.view.state.UtxoState
import encry.view.state.avlTree.AvlTree
import org.encryfoundation.common.utils.TaggedTypes.Height
import org.iq80.leveldb.{DB, Options}
import org.openjdk.jmh.annotations.{Benchmark, Mode, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}
import scorex.utils.Random

class SnapshotAssemblerBench {

  @Benchmark
  def createTree(stateBench: SnapshotAssemblerBenchState, bh: Blackhole): Unit = {
    bh.consume {
      //stateBench.a.initializeSnapshotData(stateBench.block1)
    }
  }
}
object SnapshotAssemblerBench {

  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[SnapshotAssemblerBench].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build
    new Runner(opt).run
  }

  @State(Scope.Benchmark)
  class SnapshotAssemblerBenchState extends Settings {

    val a: AvlTree[StorageKey, StorageValue] =
      createAvl("9gKDVmfsA6J4b78jDBx6JmS86Zph98NnjnUqTJBkW7zitQMReia", 0, 500000)
    val block1                              = Utils.generateGenesisBlock(Height @@ 1)


    def createAvl(address: String, from: Int, to: Int): AvlTree[StorageKey, StorageValue] = {
      val firstDir: File = FileHelper.getRandomTempDir
      val firstStorage: VLDBWrapper = {
        val levelDBInit = LevelDbFactory.factory.open(firstDir, new Options)
        VLDBWrapper(VersionalLevelDBCompanion(levelDBInit, settings.levelDB, keySize = 32))
      }
      val dir: File = FileHelper.getRandomTempDir
      val levelDb: DB = LevelDbFactory.factory.open(dir, new Options)
      val rootNodesStorage = RootNodesStorage[StorageKey, StorageValue](levelDb, 10, dir)

      val firstAvl: AvlTree[StorageKey, StorageValue] = AvlTree[StorageKey, StorageValue](firstStorage, rootNodesStorage)
      val avlNew = (from to to).foldLeft(firstAvl) { case (avl, i) =>
        val bx = Utils.genAssetBox(address, i, nonce = i)
        val b = (StorageKey !@@ bx.id, StorageValue @@ bx.bytes)
        avl.insertAndDeleteMany(StorageVersion @@ Random.randomBytes(), List(b), List.empty)
      }
      avlNew
    }

    def tmpDir: File = FileHelper.getRandomTempDir
  }

} 
Example 16
Source File: JavaSerializationBenchmark.scala From scala-commons with MIT License
package com.avsystem.commons
package rpc.akka.serialization

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}

import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup}
import org.openjdk.jmh.infra.Blackhole


@Warmup(iterations = 5)
@Measurement(iterations = 20)
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
@State(Scope.Thread)
class JavaSerializationBenchmark {

  val something = Something(42, Nested(4 :: 8 :: 15 :: 16 :: 23 :: 42 :: Nil, 0), "lol")
  val array = {
    val baos = new ByteArrayOutputStream()
    val o = new ObjectOutputStream(baos)

    o.writeObject(something)
    o.close()

    baos.toByteArray
  }

  @Benchmark
  def byteStringOutput(): Something = {
    val baos = new ByteArrayOutputStream()
    val o = new ObjectOutputStream(baos)

    o.writeObject(something)
    o.close()

    val array = baos.toByteArray

    new ObjectInputStream(new ByteArrayInputStream(array)).readObject().asInstanceOf[Something]
  }

  @Benchmark
  def writeTest(): Array[Byte] = {
    val baos = new ByteArrayOutputStream()
    val o = new ObjectOutputStream(baos)

    o.writeObject(something)
    o.close()

    baos.toByteArray
  }

  @Benchmark
  def readTest(): Something = {
    new ObjectInputStream(new ByteArrayInputStream(array)).readObject().asInstanceOf[Something]
  }
} 
Example 17
Source File: BsonInputOutputBenchmark.scala From scala-commons with MIT License
package com.avsystem.commons
package mongo

import java.io.StringWriter
import java.nio.ByteBuffer

import com.avsystem.commons.rpc.akka.serialization.{Nested, Something}
import org.bson.io.BasicOutputBuffer
import org.bson.json.{JsonReader, JsonWriter}
import org.bson.{BsonBinaryReader, BsonBinaryWriter, BsonDocument, BsonDocumentReader, BsonDocumentWriter, BsonReader, BsonValue, BsonWriter}
import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup}

@Warmup(iterations = 10)
@Measurement(iterations = 20)
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
@State(Scope.Thread)
class BsonInputOutputBenchmark {
  private val something = Something(42, Nested(List(4, 8, 15, 16, 23, 42, 0), 131), "lol")
  private val bytes = binaryEncode(something)
  private val doc = documentEncode(something)
  private val json = jsonEncode(something)

  def write(something: Something, bsonWriter: BsonWriter): Unit = {
    val output = new BsonWriterOutput(bsonWriter)
    Something.codec.write(output, something)
  }

  def binaryEncode(something: Something): Array[Byte] = {
    val bsonOutput = new BasicOutputBuffer()
    write(something, new BsonBinaryWriter(bsonOutput))
    bsonOutput.toByteArray
  }

  def documentEncode(something: Something): BsonDocument = {
    val doc = new BsonDocument()
    write(something, new BsonDocumentWriter(doc))
    doc
  }

  def jsonEncode(something: Something): String = {
    val stringWriter = new StringWriter()
    write(something, new JsonWriter(stringWriter))
    stringWriter.toString
  }

  @Benchmark
  def binaryEncoding(): Array[Byte] = {
    binaryEncode(something)
  }

  @Benchmark
  def documentEncoding(): BsonDocument = {
    documentEncode(something)
  }

  @Benchmark
  def jsonEncoding(): String = {
    jsonEncode(something)
  }

  @Benchmark
  def valueEncoding(): BsonValue = {
    BsonValueOutput.write(something)
  }

  def read(bsonReader: BsonReader): Something = {
    val input = new BsonReaderInput(bsonReader)
    Something.codec.read(input)
  }

  @Benchmark
  def binaryDecoding(): Something = {
    read(new BsonBinaryReader(ByteBuffer.wrap(bytes)))
  }

  @Benchmark
  def documentDecoding(): Something = {
    read(new BsonDocumentReader(doc))
  }

  @Benchmark
  def jsonDecoding(): Something = {
    read(new JsonReader(json))
  }
} 
Example 18
Source File: BsonCodecBenchmark.scala    From scala-commons   with MIT License 5 votes vote down vote up
package com.avsystem.commons
package mongo

import java.nio.ByteBuffer

import com.avsystem.commons.rpc.akka.serialization.{Nested, Something}
import org.bson.codecs.{BsonDocumentCodec, DecoderContext, EncoderContext}
import org.bson.io.BasicOutputBuffer
import org.bson.{BsonArray, BsonBinaryReader, BsonBinaryWriter, BsonDocument, BsonInt32, BsonString}
import org.openjdk.jmh.annotations.{Benchmark, BenchmarkMode, Fork, Measurement, Mode, Scope, State, Warmup}

@Warmup(iterations = 10)
@Measurement(iterations = 20)
@Fork(1)
@BenchmarkMode(Array(Mode.Throughput))
@State(Scope.Thread)
class BsonCodecBenchmark {

  import BsonCodecBenchmark._

  private val something = Something(42, Nested(List(4, 8, 15, 16, 23, 42, 0), 131), "lol")
  private val doc = somethingCodec.toDocument(something)
  private val bytes = binaryEncode(something)

  def binaryEncode(something: Something): Array[Byte] = {
    val output = new BasicOutputBuffer()
    val writer = new BsonBinaryWriter(output)
    val doc = somethingCodec.toDocument(something)
    bsonDocumentCodec.encode(writer, doc.toBson, EncoderContext.builder().build())
    output.toByteArray
  }

  @Benchmark
  def binaryEncoding(): Array[Byte] = {
    binaryEncode(something)
  }

  @Benchmark
  def binaryDecoding(): Something = {
    val reader = new BsonBinaryReader(ByteBuffer.wrap(bytes))
    val doc = bsonDocumentCodec.decode(reader, DecoderContext.builder().build())
    somethingCodec.fromDocument(new Doc(doc))
  }

  @Benchmark
  def encoding(): Doc = {
    somethingCodec.toDocument(something)
  }

  @Benchmark
  def decoding(): Something = {
    somethingCodec.fromDocument(doc)
  }
}

object BsonCodecBenchmark {

  import BsonCodec._

  val bsonDocumentCodec = new BsonDocumentCodec()

  val intKey: DocKey[Int, BsonInt32] = int32.key("int")
  val strKey: DocKey[String, BsonString] = string.key("str")
  val listKey: DocKey[List[Int], BsonArray] = int32.collection[List].key("list")

  val nestedCodec = new DocumentCodec[Nested] {
    override def toDocument(t: Nested): Doc = Doc()
      .put(listKey, t.list)
      .put(intKey, t.int)

    override def fromDocument(doc: Doc): Nested = Nested(
      list = doc.require(listKey),
      int = doc.require(intKey)
    )
  }

  val nestedKey: DocKey[Nested, BsonDocument] = nestedCodec.bsonCodec.key("nested")

  val somethingCodec = new DocumentCodec[Something] {
    override def toDocument(t: Something): Doc = Doc()
      .put(intKey, t.int)
      .put(nestedKey, t.nested)
      .put(strKey, t.str)

    override def fromDocument(doc: Doc): Something = Something(
      int = doc.require(intKey),
      nested = doc.require(nestedKey),
      str = doc.require(strKey)
    )
  }
} 
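The DocumentCodec pattern above pairs typed DocKeys with put/require calls. As a sketch of how the same pattern extends to another shape, reusing this file's imports (the Pair case class and its keys are hypothetical, not part of the project):

object PairCodecSketch {
  import BsonCodec._

  case class Pair(left: Int, right: String)

  val leftKey: DocKey[Int, BsonInt32] = int32.key("left")
  val rightKey: DocKey[String, BsonString] = string.key("right")

  val pairCodec = new DocumentCodec[Pair] {
    override def toDocument(t: Pair): Doc = Doc()
      .put(leftKey, t.left)
      .put(rightKey, t.right)

    override def fromDocument(doc: Doc): Pair = Pair(
      left = doc.require(leftKey),
      right = doc.require(rightKey)
    )
  }
}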
Example 19
Source File: JmhReaderBench.scala    From tethys   with Apache License 2.0 5 votes vote down vote up
package json.bench

import java.util.concurrent.TimeUnit

import json.bench.model.Data
import json.bench.tethysjson.TethysBench.TethysJacksonDataProcessor
import org.openjdk.jmh.annotations.{State, _}

@BenchmarkMode(Array(Mode.Throughput))
@OutputTimeUnit(TimeUnit.SECONDS)
@Warmup(iterations = 4, time = 5, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 4, time = 5, timeUnit = TimeUnit.SECONDS)
@Fork(value = 1, jvmArgsAppend = Array("-Xms1G", "-Xmx1G"))
@State(Scope.Benchmark)
class JmhReaderBench {
  @Param(Array(
    "128b",
    "1kb",
    "128kb",
    "1mb",
    "32mb"
  ))
  var jsonSize: String = _

  val seed = 10000

  var data: String = _

  @Setup(Level.Trial)
  def setup(): Unit = {
    val entities = jsonSize match {
      case "128b" => Data.dataSamples(1, seed)
      case "1kb" => Data.dataSamples(8, seed)
      case "128kb" => Data.dataSamples(128 * 8, seed)
      case "1mb" => Data.dataSamples(8 * 128 * 8, seed)
      case "32mb" => Data.dataSamples(32 * 8 * 128 * 8, seed)
    }
    data = TethysJacksonDataProcessor.write(entities)
  }

  @Param(Array(
    "tethys-jackson",
    "pure-jackson",
    "circe-jawn",
    "circe-jackson",
    "json4s-jackson",
    "json4s-native",
    "play-json",
    "spray-json"
  ))
  var processorName: String = _

  @Benchmark
  def bench: Seq[Data] = {
    DataReader.instances(processorName).read(data)
  }
} 
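With five jsonSize values and eight processorName values, a full run of this benchmark covers 40 parameter combinations. To pin a programmatic launch to a single combination, JMH's OptionsBuilder accepts explicit parameter values; a minimal sketch (the runner object is illustrative):

object JmhReaderBenchRunner {
  import org.openjdk.jmh.runner.Runner
  import org.openjdk.jmh.runner.options.OptionsBuilder

  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[JmhReaderBench].getSimpleName + ".*")
      .param("jsonSize", "1kb")
      .param("processorName", "tethys-jackson")
      .build
    new Runner(opt).run
  }
}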
Example 20
Source File: FastFiloRowReaderBenchmark.scala    From filo   with Apache License 2.0 5 votes vote down vote up
package org.velvia.filo

import java.sql.Timestamp
import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.{Mode, State, Scope}
import org.openjdk.jmh.annotations.OutputTimeUnit
import scalaxy.loops._
import scala.language.postfixOps

import java.util.concurrent.TimeUnit


@State(Scope.Thread)
class FastFiloRowReaderBenchmark {
  import VectorReader._

  // Ok, create an IntColumn and benchmark it.
  val numValues = 10000

  val randomInts = (0 until numValues).map(i => util.Random.nextInt)
  val randomLongs = randomInts.map(_.toLong)
  val randomTs = randomLongs.map(l => new Timestamp(l))

  val chunks = Array(VectorBuilder(randomInts).toFiloBuffer,
                     VectorBuilder(randomLongs).toFiloBuffer,
                     VectorBuilder(randomTs).toFiloBuffer)
  val clazzes = Array[Class[_]](classOf[Int], classOf[Long], classOf[Timestamp])

  // According to @ktosopl, be sure to return some value if possible so that the JVM won't
  // optimize out the method body. However, JMH is apparently very good at avoiding this.
  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def createFastFiloRowReader(): RowReader = {
    new FastFiloRowReader(chunks, clazzes)
  }

  val fastReader = new FastFiloRowReader(chunks, clazzes)

  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def fastFiloRowReaderReadOne(): Int = {
    fastReader.setRowNo(0)
    if (fastReader.notNull(0)) fastReader.getInt(0) + 1 else 0
  }
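
  // Sketch, not in the original file: the same construction benchmark, sinking the result
  // into a JMH Blackhole (org.openjdk.jmh.infra.Blackhole) instead of returning it,
  // the other common way to keep the JIT from eliminating the benchmark body.
  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def createFastFiloRowReaderBlackhole(bh: org.openjdk.jmh.infra.Blackhole): Unit = {
    bh.consume(new FastFiloRowReader(chunks, clazzes))
  }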
} 
Example 21
Source File: BasicFiloBenchmark.scala    From filo   with Apache License 2.0 5 votes vote down vote up
package org.velvia.filo

import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.{Mode, State, Scope}
import org.openjdk.jmh.annotations.OutputTimeUnit
import scalaxy.loops._
import scala.language.postfixOps

import java.util.concurrent.TimeUnit


@State(Scope.Thread)
class BasicFiloBenchmark {
  import VectorReader._
  import vectors.IntBinaryVector

  // Ok, create an IntColumn and benchmark it.
  val numValues = 10000

  val randomInts = (0 until numValues).map(i => util.Random.nextInt)
  val randomIntsAray = randomInts.toArray
  val filoBuffer = VectorBuilder(randomInts).toFiloBuffer
  val sc = FiloVector[Int](filoBuffer)

  val ivbuilder = IntBinaryVector.appendingVectorNoNA(numValues)
  randomInts.foreach(ivbuilder.addData)
  val iv = IntBinaryVector(ivbuilder.base, ivbuilder.offset, ivbuilder.numBytes)

  val byteFiloBuf = VectorBuilder(randomInts.map(_ % 128)).toFiloBuffer
  val byteVect = FiloVector[Int](byteFiloBuf)

  val diffFiloBuf = VectorBuilder(randomInts.map(10000 + _ % 128)).toFiloBuffer
  val diffVect = FiloVector[Int](diffFiloBuf)

  // According to @ktosopl, be sure to return some value if possible so that the JVM won't
  // optimize out the method body. However, JMH is apparently very good at avoiding this.
  // Fastest loop possible, using the FiloVector apply method.
  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsFiloApply(): Int = {
    var total = 0
    for { i <- 0 until numValues optimized } {
      total += sc(i)
    }
    total
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsBinaryVectApply(): Int = {
    var total = 0
    for { i <- 0 until numValues optimized } {
      total += iv(i)
    }
    total
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsFiloByteApply(): Int = {
    var total = 0
    for { i <- 0 until numValues optimized } {
      total += byteVect(i)
    }
    total
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsFiloDiffApply(): Int = {
    var total = 0
    for { i <- 0 until numValues optimized } {
      total += diffVect(i)
    }
    total
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllNotNullIntsFiloApply(): Int = {
    var total = 0
    for { i <- 0 until numValues optimized } {
      if (sc.isAvailable(i)) total += sc(i)
    }
    total
  }

  // sum which uses foreach from FiloVector
  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsFiloForeachFoldLeft(): Int = {
    sc.foldLeft(0)(_ + _)
  }
} 
Example 22
Source File: UTF8StringBenchmark.scala    From filo   with Apache License 2.0 5 votes vote down vote up
package org.velvia.filo

import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.{Mode, State, Scope}
import org.openjdk.jmh.annotations.OutputTimeUnit

import java.util.concurrent.TimeUnit


@State(Scope.Thread)
class UTF8StringBenchmark {

  val str = "xylophonemania"
  val str2 = "xylophonemaniac"
  val zcStr = ZeroCopyUTF8String(str)
  val zcStr2 = ZeroCopyUTF8String(str2)

  // According to @ktosopl, be sure to return some value if possible so that the JVM won't
  // optimize out the method body. However, JMH is apparently very good at avoiding this.
  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def utf8StrCompare(): Int = {
    zcStr.compare(zcStr2)
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def nativeStrCompare(): Int = {
    str.compare(str2)
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def utf8Substring(): ZeroCopyUTF8String = {
    zcStr.substring(2, 6)
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def nativeSubstring(): String = {
    str.substring(2, 6)
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.Throughput))
  @OutputTimeUnit(TimeUnit.SECONDS)
  def utf8hash(): Int = {
    zcStr.hashCode
  }
} 
Example 23
Source File: DictStringBenchmark.scala    From filo   with Apache License 2.0 5 votes vote down vote up
package org.velvia.filo

import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.{Mode, State, Scope}
import org.openjdk.jmh.annotations.OutputTimeUnit
import scalaxy.loops._
import scala.language.postfixOps

import java.util.concurrent.TimeUnit


@State(Scope.Thread)
class DictStringBenchmark {
  import scala.util.Random.{alphanumeric, nextInt, nextFloat}
  import VectorReader._

  val numValues = 10000
  // NOTE: results show that time spent is heavily influenced by ratio of unique strings...
  val numUniqueStrings = 500
  val maxStringLength = 15
  val minStringLength = 5
  val naChance = 0.05    // 5% of values will be NA

  def randString(len: Int): String = alphanumeric.take(len).mkString

  val uniqueStrings = (0 until numUniqueStrings).map { i =>
    randString(minStringLength + nextInt(maxStringLength - minStringLength))
  }
  val randomStrings = (0 until numValues).map(i => uniqueStrings(nextInt(numUniqueStrings)))
  val filoBufferNoNA = VectorBuilder(randomStrings).toFiloBuffer
  val scNoNA = FiloVector[String](filoBufferNoNA)

  def shouldNA: Boolean = nextFloat < naChance

  val filoBufferNA = VectorBuilder.fromOptions(
                       randomStrings.map(str => if (shouldNA) None else Some(str))
                     ).toFiloBuffer
  val scNA = FiloVector[String](filoBufferNA)

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def rawStringLengthTotal(): Int = {
    var totalLen = 0
    for { i <- 0 until numValues optimized } {
      totalLen += scNoNA(i).length
    }
    totalLen
  }

  // TODO: also a benchmark for the foreach/fold of a column with no NA's?

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  // Measures foreach and NA read speed; returns the total so the JVM cannot discard the work
  def withNAlengthTotal(): Int = {
    var totalLen = 0
    scNA.foreach { str => totalLen += str.length }
    totalLen
  }
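
  // Sketch, not in the original file, addressing the TODO above: a foreach/fold benchmark
  // over the no-NA column, modeled on the foldLeft benchmark in BasicFiloBenchmark.
  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def noNAlengthTotalFoldLeft(): Int = {
    scNoNA.foldLeft(0)(_ + _.length)
  }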
} 
Example 24
Source File: ScalaReadBenchmark.scala    From filo   with Apache License 2.0 5 votes vote down vote up
package org.velvia.filo

import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.{Mode, State, Scope}
import org.openjdk.jmh.annotations.OutputTimeUnit
import scalaxy.loops._
import scala.language.postfixOps

import java.util.concurrent.TimeUnit


@State(Scope.Thread)
class ScalaReadBenchmark {
  // Ok, create an IntColumn and benchmark it.
  val numValues = 10000

  val randomInts = (0 until numValues).map(i => util.Random.nextInt)
  val randomIntsAray = randomInts.toArray

  // Scala Seq sum
  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsScalaSeqFoldLeft(): Int = {
    randomInts.foldLeft(0)(_ + _)
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsScalaArrayFoldLeft(): Int = {
    randomIntsAray.foldLeft(0)(_ + _)
  }

  @Benchmark
  @BenchmarkMode(Array(Mode.AverageTime))
  @OutputTimeUnit(TimeUnit.MICROSECONDS)
  def sumAllIntsScalaArrayWhileLoop(): Int = {
    var total = 0
    for { i <- 0 until numValues optimized } {
      total += randomIntsAray(i)
    }
    total
  }
} 
Example 25
Source File: FiberRefBenchmarks.scala    From zio   with Apache License 2.0 5 votes vote down vote up
package zio

import java.util.concurrent.TimeUnit

import org.openjdk.jmh.annotations.Benchmark
import org.openjdk.jmh.annotations.BenchmarkMode
import org.openjdk.jmh.annotations.Fork
import org.openjdk.jmh.annotations.Measurement
import org.openjdk.jmh.annotations.Mode
import org.openjdk.jmh.annotations.OutputTimeUnit
import org.openjdk.jmh.annotations.Param
import org.openjdk.jmh.annotations.Scope
import org.openjdk.jmh.annotations.State
import org.openjdk.jmh.annotations.Threads
import org.openjdk.jmh.annotations.Warmup

import zio.IOBenchmarks.verify

@State(Scope.Thread)
@BenchmarkMode(Array(Mode.Throughput))
@OutputTimeUnit(TimeUnit.SECONDS)
@Warmup(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Measurement(iterations = 5, time = 1, timeUnit = TimeUnit.SECONDS)
@Fork(1)
@Threads(1)
class FiberRefBenchmarks {
  @Param(Array("32"))
  var n: Int = _

  @Benchmark
  def tracedCreateUpdateAndRead(): Unit =
    createUpdateAndRead(IOBenchmarks.TracedRuntime)

  @Benchmark
  def unTracedCreateUpdateAndRead(): Unit =
    createUpdateAndRead(IOBenchmarks)

  @Benchmark
  def unTracedJustYield(): Unit =
    justYield(IOBenchmarks)

  @Benchmark
  def unTracedCreateFiberRefsAndYield(): Unit =
    createFiberRefsAndYield(IOBenchmarks)

  private def justYield(runtime: Runtime[Any]) = runtime.unsafeRun {
    for {
      _ <- ZIO.foreach_(1.to(n))(_ => ZIO.yieldNow)
    } yield ()
  }

  private def createFiberRefsAndYield(runtime: Runtime[Any]) = runtime.unsafeRun {
    for {
      fiberRefs <- ZIO.foreach(1.to(n))(i => FiberRef.make(i))
      _         <- ZIO.foreach_(1.to(n))(_ => ZIO.yieldNow)
      values    <- ZIO.foreachPar(fiberRefs)(_.get)
      _         <- verify(values == 1.to(n))(s"Got $values")
    } yield ()
  }

  private def createUpdateAndRead(runtime: Runtime[Any]) = runtime.unsafeRun {
    for {
      fiberRefs <- ZIO.foreach(1.to(n))(i => FiberRef.make(i))
      values1   <- ZIO.foreachPar(fiberRefs)(ref => ref.update(-_) *> ref.get)
      values2   <- ZIO.foreachPar(fiberRefs)(_.get)
      _ <- verify(values1.forall(_ < 0) && values1.size == values2.size)(
            s"Got \nvalues1: $values1, \nvalues2: $values2"
          )
    } yield ()
  }
} 
Example 26
Source File: VersionalLevelDBBanches.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.util.concurrent.TimeUnit
import benches.VersionalLevelDBBanches.VersionalLevelDBState
import encry.settings.LevelDBSettings
import encry.storage.levelDb.versionalLevelDB.{LevelDbFactory, VersionalLevelDBCompanion}
import encry.utils.FileHelper
import org.iq80.leveldb.Options
import org.openjdk.jmh.annotations.{Benchmark, Mode, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}
import org.openjdk.jmh.runner.{Runner, RunnerException}

class VersionalLevelDBBanches {

  @Benchmark
  def versionalLevelDbInsertion(benchStateHistory: VersionalLevelDBState, bh: Blackhole): Unit = {
    bh.consume {
      val tempDir = FileHelper.getRandomTempDir

      val levelDBInit = LevelDbFactory.factory.open(tempDir, new Options)

      val vldbInit = VersionalLevelDBCompanion(levelDBInit, LevelDBSettings(100))

      benchStateHistory.elems10k.foreach(vldbInit.insert)

      vldbInit.close()
    }
  }
}

object VersionalLevelDBBanches {

  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[VersionalLevelDBBanches].getSimpleName + ".*")
      .forks(1)
      .threads(2)
      .warmupIterations(1)
      .measurementIterations(1)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(500))
      .measurementTime(TimeValue.minutes(5))
      .build
    new Runner(opt).run
  }

  @State(Scope.Benchmark)
  class VersionalLevelDBState {

    //val elems1k = Utils.generateRandomLevelDbElemsWithoutDeletions(1000, 100)
    //val elems5k = Utils.generateRandomLevelDbElemsWithoutDeletions(5000, 100)
    val elems10k = Utils.generateRandomLevelDbElemsWithoutDeletions(10000, 100)
    //val elems30k = Utils.generateRandomLevelDbElemsWithoutDeletions(30000, 100)
  }
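
  // Sketch, not in the original file: the commented-out sizes above could become a JMH
  // parameter axis, following the @Param + @Setup pattern from JmhReaderBench. Assumes
  // additional imports of Param and Setup from org.openjdk.jmh.annotations; the element
  // type is left as Seq[Any] because Utils' return type is not shown here.
  @State(Scope.Benchmark)
  class SizedVersionalLevelDBState {
    @Param(Array("1000", "5000", "10000", "30000"))
    var count: Int = _

    var elems: Seq[Any] = _

    @Setup
    def setup(): Unit =
      elems = Utils.generateRandomLevelDbElemsWithoutDeletions(count, 100)
  }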
} 
Example 27
Source File: StateRollbackBench.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package benches

import java.io.File
import java.util.concurrent.TimeUnit

import benches.StateRollbackBench.StateRollbackState
import benches.Utils._
import encry.storage.VersionalStorage
import encry.utils.CoreTaggedTypes.VersionTag
import encry.view.state.{BoxHolder, UtxoState}
import encryBenchmark.{BenchSettings, Settings}
import org.encryfoundation.common.modifiers.history.Block
import org.encryfoundation.common.modifiers.state.box.AssetBox
import org.encryfoundation.common.utils.TaggedTypes.{ADKey, Difficulty}
import org.openjdk.jmh.annotations.{Benchmark, Mode, Scope, State}
import org.openjdk.jmh.infra.Blackhole
import org.openjdk.jmh.profile.GCProfiler
import org.openjdk.jmh.runner.{Runner, RunnerException}
import org.openjdk.jmh.runner.options.{OptionsBuilder, TimeValue, VerboseMode}

class StateRollbackBench {

  @Benchmark
  def applyBlocksToTheState(stateBench: StateRollbackState, bh: Blackhole): Unit = {
    bh.consume {
      val innerState: UtxoState =
        utxoFromBoxHolder(stateBench.boxesHolder, getRandomTempDir, None, stateBench.settings, VersionalStorage.IODB)
      val newState = stateBench.chain.foldLeft(innerState -> List.empty[VersionTag]) { case ((state, rootHashes), block) =>
        val newState = state.applyModifier(block).right.get
        newState -> (rootHashes :+ newState.version)
      }
      val stateAfterRollback = newState._1.rollbackTo(newState._2.dropRight(1).last, List.empty).get
      val stateAfterForkBlockApplying = stateAfterRollback.applyModifier(stateBench.forkBlocks.last).right.get
      stateAfterForkBlockApplying.close()
    }
  }
}

object StateRollbackBench extends BenchSettings {

  @throws[RunnerException]
  def main(args: Array[String]): Unit = {
    val opt = new OptionsBuilder()
      .include(".*" + classOf[StateRollbackBench].getSimpleName + ".*")
      .forks(1)
      .threads(1)
      .warmupIterations(benchSettings.benchesSettings.warmUpIterations)
      .measurementIterations(benchSettings.benchesSettings.measurementIterations)
      .mode(Mode.AverageTime)
      .timeUnit(TimeUnit.SECONDS)
      .verbosity(VerboseMode.EXTRA)
      .addProfiler(classOf[GCProfiler])
      .warmupTime(TimeValue.milliseconds(benchSettings.benchesSettings.warmUpTime))
      .measurementTime(TimeValue.milliseconds(benchSettings.benchesSettings.measurementTime))
      .build
    new Runner(opt).run
  }

  @State(Scope.Benchmark)
  class StateRollbackState extends encry.settings.Settings {

    val tmpDir: File = getRandomTempDir

    val initialBoxes: IndexedSeq[AssetBox] = (0 until benchSettings.stateBenchSettings.totalBoxesNumber).map(nonce =>
      genHardcodedBox(privKey.publicImage.address.address, nonce)
    )
    val boxesHolder: BoxHolder = BoxHolder(initialBoxes)
    var state: UtxoState = utxoFromBoxHolder(boxesHolder, tmpDir, None, settings, VersionalStorage.LevelDB)
    val genesisBlock: Block = generateGenesisBlockValidForState(state)

    state = state.applyModifier(genesisBlock).right.get

    val stateGenerationResults: (List[(Block, Block)], Block, UtxoState, IndexedSeq[AssetBox]) =
      (0 until benchSettings.stateBenchSettings.blocksNumber).foldLeft(List.empty[(Block, Block)], genesisBlock, state, initialBoxes) {
        case ((blocks, block, stateL, boxes), _) =>
          val nextBlockMainChain: Block = generateNextBlockForStateWithSpendingAllPreviousBoxes(
            block,
            stateL,
            block.payload.txs.flatMap(_.newBoxes.map(_.asInstanceOf[AssetBox])).toIndexedSeq)
          val nextBlockFork: Block = generateNextBlockForStateWithSpendingAllPreviousBoxes(
            block,
            stateL,
            block.payload.txs.flatMap(_.newBoxes.map(_.asInstanceOf[AssetBox])).toIndexedSeq,
            addDiff = Difficulty @@ BigInt(100)
          )
          val stateN: UtxoState = stateL.applyModifier(nextBlockMainChain).right.get
          (blocks :+ (nextBlockMainChain, nextBlockFork),
            nextBlockMainChain,
            stateN,
            boxes.drop(
              benchSettings.stateBenchSettings.transactionsNumberInEachBlock *
                benchSettings.stateBenchSettings.numberOfInputsInOneTransaction)
          )
      }
    val chain: List[Block] = genesisBlock +: stateGenerationResults._1.map(_._1)
    val forkBlocks: List[Block] = genesisBlock +: stateGenerationResults._1.map(_._2)
    state = stateGenerationResults._3
    state.close()
  }
}