scala.collection.mutable.Buffer Scala Examples

The following examples show how to use scala.collection.mutable.Buffer. They are drawn from open-source projects; the source file and originating project are listed above each example.
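Before the project examples, here is a minimal, self-contained sketch of the core Buffer operations the examples below rely on (construction, in-place append, indexed insert and removal):

import scala.collection.mutable.Buffer

object BufferBasics extends App {
  val buf = Buffer(1, 2, 3)   // Buffer() is backed by an ArrayBuffer
  buf += 4                    // append in place
  buf.insert(0, 0)            // insert at index 0
  val first = buf.remove(0)   // remove and return the element at index 0
  println(buf)                // ArrayBuffer(1, 2, 3, 4)
  println(first)              // 0
}
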
Example 1
Source File: NextIteratorSuite.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
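The suite above exercises Spark's package-private NextIterator, whose definition is not shown on this page. Below is a minimal sketch, reconstructed from the behavior the tests assert, of the contract a subclass relies on: implement getNext/close, set finished when exhausted, and close() runs exactly once.

abstract class NextIterator[U] extends Iterator[U] {
  private var gotNext = false
  private var nextValue: U = _
  private var closed = false
  protected var finished = false

  protected def getNext(): U
  protected def close(): Unit

  def closeIfNeeded(): Unit = if (!closed) { closed = true; close() }

  override def hasNext: Boolean = {
    if (!finished && !gotNext) {
      nextValue = getNext()
      if (finished) closeIfNeeded() // close exactly once, on exhaustion
      gotNext = true
    }
    !finished
  }

  override def next(): U = {
    if (!hasNext) throw new java.util.NoSuchElementException("End of stream")
    gotNext = false
    nextValue
  }
}
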
Example 2
Source File: StyledVerbatim.scala    From paradox   with Apache License 2.0
package com.lightbend.paradox.markdown

import java.util.function.Consumer

import scala.collection.mutable.Buffer
import scala.collection.JavaConverters._
import org.parboiled.common.StringUtils
import org.pegdown.ast.{ VerbatimGroupNode, VerbatimNode }
import org.pegdown.{ Printer, VerbatimSerializer }


object PrettifyVerbatimSerializer extends StyledVerbatimSerializer {
  override def printPreAttributes(printer: Printer, nodeGroup: String, classes: Buffer[String]): Unit = {
    val allClasses = "prettyprint" +: (nodeGroup match {
      case "" => classes
      case g  => ("group-" + g) +: classes
    })
    printClass(printer, allClasses.mkString(" "))
  }

  override def printCodeAttributes(printer: Printer, nodeType: String): Unit = nodeType match {
    case "text" | "nocode" => printClass(printer, "nocode")
    case _                 => printClass(printer, s"language-$nodeType")
  }
}

object RawVerbatimSerializer extends VerbatimSerializer {

  val tag = "raw"

  override def serialize(node: VerbatimNode, printer: Printer): Unit = {
    printer.println()
    printer.print(node.getText)
    printer.printchkln()
  }
} 
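For reference, the class list assembled by PrettifyVerbatimSerializer can be checked standalone (the group and class values here are made up):

import scala.collection.mutable.Buffer

val classes = Buffer("lineNums")
val nodeGroup = "a"
val allClasses = "prettyprint" +: (nodeGroup match {
  case "" => classes
  case g  => ("group-" + g) +: classes
})
// allClasses.mkString(" ") == "prettyprint group-a lineNums"
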
Example 3
Source File: WeedOutMasterActor.scala    From speedo   with Apache License 2.0
package com.htc.speedo.akka

import scala.collection.mutable.Buffer

import akka.actor.ActorRef

import com.twitter.scalding.Args

import MasterActor._


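  // Note: the scraped snippet omits the enclosing actor class. The members
  // below assume a master actor that defines `lastUpdates: Buffer[ActorRef]`
  // (the most recent senders) and `maxInterval: Int` (the size of that window).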
  var updateIndex = 0

  override def strategyName = "weed-out"

  override def parseTrainResult(loss: Double) = {
    val needMerge =
      if (lastUpdates.size < maxInterval) {
        // For the first few iterations, always do merge
        lastUpdates += sender
        true
      } else {
        // If the sender exists in [[lastUpdates]], we consider it not delayed
        // and merge its delta into snapshot weight
        val merge = lastUpdates.contains(sender)
        // update the last updated workers in the queue
        lastUpdates(updateIndex) = sender
        merge
      }
    // update the next index in queue
    updateIndex += 1
    if (updateIndex == maxInterval) updateIndex = 0
    // always start training for the worker
    ParsedTrainResult(if (needMerge) MergeResultSender else MergeResultNone)
  }

  override def workerCreated(worker: ActorRef) = {
    lastUpdates.insert(updateIndex, worker) // insert worker as oldest updater
    updateIndex += 1 // update next index
    maxInterval += 1 // the interval is increased by 1
    super.workerCreated(worker) // start training
  }

  override def workerTerminated(worker: ActorRef) = {
    // remove oldest element
    if (updateIndex < lastUpdates.size) lastUpdates.remove(updateIndex, 1)
    maxInterval -= 1 // the interval is decreased by 1
    if (updateIndex == maxInterval) updateIndex = 0 // update next index
  }
} 
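The merge decision above can be exercised standalone. A minimal sketch of the circular "last updaters" window (worker names are made up; in the actor, lastUpdates holds ActorRefs):

import scala.collection.mutable.Buffer

val lastUpdates = Buffer[String]()
val maxInterval = 2
var updateIndex = 0

def shouldMerge(sender: String): Boolean = {
  val merge =
    if (lastUpdates.size < maxInterval) { lastUpdates += sender; true }
    else {
      val m = lastUpdates.contains(sender)
      lastUpdates(updateIndex) = sender
      m
    }
  updateIndex += 1
  if (updateIndex == maxInterval) updateIndex = 0
  merge
}

// shouldMerge("w1") and shouldMerge("w2") fill the window and return true;
// a first shouldMerge("w3") returns false (treated as delayed), after which
// "w3" is in the window and merges on its next update.
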
Example 4
Source File: CollectionExample.scala    From Scala-and-Spark-for-Big-Data-Analytics   with MIT License
package com.chapter3.ScalaFP
import scala.collection._
import scala.collection.mutable.Buffer
import scala.collection.mutable.HashMap

object CollectionExample {
  def main(args: Array[String]) {
    val x = 10
    val y = 15
    val z = 19
    
    Traversable(1, 2, 3)
    Iterable("x", "y", "z")
    Map("x" -> 10, "y" -> 13, "z" -> 17)
    Set("Red", "Green", "Blue")
    SortedSet("Hello,", "world!")
    Buffer(x, y, z)
    IndexedSeq(0.0, 1.0, 2.0)
    LinearSeq(x, y, z)
    List(2, 6, 10)
    HashMap("x" -> 20, "y" -> 19, "z" -> 16)
    
    val list = List(1, 2, 3) map (_ + 1)
    println(list)
    
    val set = Set(1, 2, 3) map (_ * 2)
    println(set)
    
    val list2 = List(x, y, z).map(x => x * 3)
    println(list2)
  }
} 
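For reference, running this program prints the three mapped collections (with x = 10, y = 15, z = 19):

List(2, 3, 4)
Set(2, 4, 6)
List(30, 45, 57)
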
Example 5
Source File: NextIteratorSuite.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 6
Source File: Base64Codecs.scala    From mimir   with Apache License 2.0
package mimir.util

import java.io._
import scala.collection.mutable.Buffer

object SerializationUtils {

  private val base64in = java.util.Base64.getDecoder()
  private val base64out = java.util.Base64.getEncoder()

  def serialize[A](obj:A):Array[Byte] =
  {
    val out = new java.io.ByteArrayOutputStream()
    val objects = new java.io.ObjectOutputStream(out)
    objects.writeObject(obj)
    objects.flush();
    out.toByteArray();
  }
  def serializeToBase64[A](obj:A): String =
  {
    b64encode(serialize(obj))
  }

  def deserialize[A](data: Array[Byte]): A =
  {
    val objects = new java.io.ObjectInputStream(
      new java.io.ByteArrayInputStream(data)
    ) {
        override def resolveClass(desc: java.io.ObjectStreamClass): Class[_] = {
          try { Class.forName(desc.getName, false, getClass.getClassLoader) }
          catch { case ex: ClassNotFoundException => super.resolveClass(desc) }
        }
      }
    objects.readObject().asInstanceOf[A]
  }
  def deserializeFromBase64[A](data: String): A =
  {
    deserialize[A](b64decode(data))
  }

  def b64encode(data: Array[Byte]): String =
    base64out.encodeToString(data)
  def b64decode(data: String): Array[Byte] =
    base64in.decode(data)

  def b64encode(file: File): String =
  {
    val in = new FileInputStream(file);
    val buff = Buffer[Byte]();
    var c = in.read();
    while(c >= 0){
      buff += c.toByte
      c = in.read();
    }
    return b64encode(buff.toArray);
  }

} 
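A round-trip usage sketch for the helpers above (any Serializable value works):

val encoded = SerializationUtils.serializeToBase64(List(1, 2, 3))
val decoded = SerializationUtils.deserializeFromBase64[List[Int]](encoded)
assert(decoded == List(1, 2, 3))
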
Example 7
Source File: NextIteratorSuite.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 8
Source File: SoftThreadLocal.scala    From hazelcast-scala   with Apache License 2.0
package com.hazelcast.Scala.serialization

import java.lang.ref.SoftReference
import scala.collection.mutable.Buffer

private[serialization] class SoftThreadLocal[T <: AnyRef](ctor: => T) {
  private class Instances {
    private[this] def newRef: SoftReference[T] = new SoftReference(ctor)
    private[this] def newRef(t: T): SoftReference[T] = new SoftReference(t)
    private[this] val buffer = Buffer(newRef)
    private[this] var idx = 0
    def use[R](thunk: T => (T, R)): R = {
      if (idx < buffer.length) {
        buffer(idx).get match {
          case null =>
            buffer(idx) = newRef
            use(thunk)
          case t =>
            idx += 1
            val (rt, r) = try thunk(t) finally idx -= 1
            if (t ne rt) buffer(idx) = newRef(rt)
            r
        }
      } else {
        buffer += newRef
        use(thunk)
      }
    }
  }
  private[this] val tl = new ThreadLocal[Instances] {
    override def initialValue = new Instances
  }
  final def use[R](thunk: T => (T, R)): R = tl.get.use(thunk)
} 
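A usage sketch (ignoring the private[serialization] modifier): the thunk returns both the instance to cache and a result, which lets a caller swap in a replacement instance when needed:

val local = new SoftThreadLocal(new java.lang.StringBuilder)
val text = local.use { sb =>
  sb.setLength(0)
  sb.append("hello")
  (sb, sb.toString)  // keep caching the same instance, return the rendered string
}
// text == "hello"
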
Example 9
Source File: NextIteratorSuite.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {//一个迭代器
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {//两个迭代器
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {//空的迭代器
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {//关闭为一次为空的迭代器
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }
  // close is called once for a non-empty iteration
  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 10
Source File: FeatureSelection.scala    From aerosolve   with Apache License 2.0
package com.airbnb.aerosolve.training

import java.io.BufferedWriter
import java.io.OutputStreamWriter
import java.util

import com.airbnb.aerosolve.core.{ModelRecord, ModelHeader, FeatureVector, Example}
import com.airbnb.aerosolve.core.models.LinearModel
import com.airbnb.aerosolve.core.util.Util
import com.typesafe.config.Config
import org.slf4j.{LoggerFactory, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD

import scala.collection.mutable.HashMap
import scala.collection.mutable.HashSet
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Buffer
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.util.Random
import scala.math.abs
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.fs.Path
import org.apache.hadoop.conf.Configuration

object FeatureSelection {
  private final val log: Logger = LoggerFactory.getLogger("FeatureSelection")
  val allKey : (String, String) = ("$ALL", "$POS")

  // Given a RDD compute the pointwise mutual information between
  // the positive label and the discrete features.
  def pointwiseMutualInformation(examples : RDD[Example],
                                 config : Config,
                                 key : String,
                                 rankKey : String,
                                 posThreshold : Double,
                                 minPosCount : Double,
                                 newCrosses : Boolean) : RDD[((String, String), Double)] = {
    val pointwise = LinearRankerUtils.makePointwise(examples, config, key, rankKey)
    val features = pointwise
      .mapPartitions(part => {
      // The tuple2 is var, var | positive
      val output = scala.collection.mutable.HashMap[(String, String), (Double, Double)]()
      part.foreach(example =>{
        val featureVector = example.example.get(0)
        val isPos = if (featureVector.floatFeatures.get(rankKey).asScala.head._2 > posThreshold) 1.0
        else 0.0
        val all : (Double, Double) = output.getOrElse(allKey, (0.0, 0.0))
        output.put(allKey, (all._1 + 1.0, all._2 + 1.0 * isPos))

        val features : Array[(String, String)] =
          LinearRankerUtils.getFeatures(featureVector)
        if (newCrosses) {
          for (i <- features) {
            for (j <- features) {
              if (i._1 < j._1) {
                val key = ("%s<NEW>%s".format(i._1, j._1),
                           "%s<NEW>%s".format(i._2, j._2))
                val x = output.getOrElse(key, (0.0, 0.0))
                output.put(key, (x._1 + 1.0, x._2 + 1.0 * isPos))
              }
            }
          }
        }
        for (feature <- features) {
          val x = output.getOrElse(feature, (0.0, 0.0))
          output.put(feature, (x._1 + 1.0, x._2 + 1.0 * isPos))
        }
      })
      output.iterator
    })
    .reduceByKey((a, b) => (a._1 + b._1, a._2 + b._2))
    .filter(x => x._2._2 >= minPosCount)

    val allCount = features.filter(x => x._1.equals(allKey)).take(1).head

    features.map(x => {
      val prob = x._2._1 / allCount._2._1
      val probPos = x._2._2 / allCount._2._2
      (x._1, math.log(probPos / prob) / math.log(2.0))
    })
  }

  // Returns the maximum entropy per family
  def maxEntropy(input : RDD[((String, String), Double)]) : RDD[((String, String), Double)] = {
    input
      .map(x => (x._1._1, (x._1._2, x._2)))
      .reduceByKey((a, b) => if (math.abs(a._2) > math.abs(b._2)) a else b)
      .map(x => ((x._1, x._2._1), x._2._2))
  }
} 
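The per-feature score returned above is the base-2 pointwise mutual information between the feature and the positive label; restated as a standalone function:

// count/pos are a feature's (total, positive) counts;
// allCount/allPos are the "$ALL" totals.
def pmi(count: Double, pos: Double, allCount: Double, allPos: Double): Double =
  math.log((pos / allPos) / (count / allCount)) / math.log(2.0)
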
Example 11
Source File: NextIteratorSuite.scala    From iolap   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 12
Source File: package.scala    From gbf-raidfinder   with MIT License
package walfie.gbf.raidfinder.client

import com.thoughtworks.binding
import com.thoughtworks.binding.Binding
import com.thoughtworks.binding.Binding._
import org.scalajs.dom
import org.scalajs.dom.raw._
import scala.collection.mutable.Buffer
import scala.scalajs.js
import walfie.gbf.raidfinder.client.ViewModel.ImageQuality
import walfie.gbf.raidfinder.protocol._

import js.Dynamic.global

package object syntax {
  implicit class HTMLElementOps[T <: HTMLElement](val elem: T) extends AnyVal {
    // (members elided in extraction)
  }

  // The := alias below clears a buffer and refills it in place. Its enclosing
  // implicit class was lost in extraction; this wrapper is a plausible
  // reconstruction.
  implicit class BufferOps[T](val buffer: Buffer[T]) extends AnyVal {
    def :=(elements: TraversableOnce[T]): Buffer[T] = {
      buffer.clear()
      buffer ++= elements
    }
  }

  implicit class StringOps(val string: String) extends AnyVal {
    def addIf(condition: Boolean, s: String): String =
      if (condition) s"$string $s" else string
  }

  implicit class LanguageOps(val language: Language) extends AnyVal {
    def shortName: Option[String] = language match {
      case Language.JAPANESE => Some("JP")
      case Language.ENGLISH => Some("EN")
      case _ => None
    }
  }
} 
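A usage sketch for the := alias (relying on the BufferOps reconstruction above):

import scala.collection.mutable.Buffer
import walfie.gbf.raidfinder.client.syntax._

val buf = Buffer(1, 2, 3)
buf := Seq(4, 5)
// buf is now Buffer(4, 5)
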
Example 13
Source File: StreamSplitterMixin.scala    From incubator-daffodil   with Apache License 2.0
package org.apache.daffodil.processors.unparsers

import org.apache.daffodil.io.DataOutputStream
import org.apache.daffodil.processors.TermRuntimeData
import org.apache.daffodil.processors.Processor
import org.apache.daffodil.processors.SuspendableOperation
import org.apache.daffodil.exceptions.Assert
import org.apache.daffodil.util.Maybe
import scala.collection.mutable.Buffer

trait StreamSplitter {
  
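  // Note: the scraped snippet is incomplete: this trait's members were dropped,
  // and the two overrides below belong to a SuspendableOperation subclass whose
  // declaration was also lost. `secondTime` is assumed to be a private var
  // initialized to false.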
  override def test(ustate: UState): Boolean = {
    if (secondTime) true
    else {
      secondTime = true
      false
    }
  }

  override def continuation(ustate: UState): Unit = {
    // do nothing.
    //
    // The underlying suspension system will take care of
    // finishing the DOS so everything gets unblocked.
  }
}

object RegionSplitUnparser {
  def apply(trd: TermRuntimeData) = {
    val unp = new RegionSplitUnparser(trd)
    Processor.initialize(unp)
    unp
  }
} 
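The test/continuation pair above implements a run-twice suspension: the first evaluation reports "not ready" so the operation is parked, and every later evaluation succeeds. The flag logic in isolation:

var secondTime = false
def test(): Boolean =
  if (secondTime) true
  else { secondTime = true; false }

assert(!test()) // first call parks the suspension
assert(test())  // second call releases it
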
Example 14
Source File: DomHook.scala    From scastie   with Apache License 2.0
package com.olegych.scastie.api
package runtime

import org.scalajs.dom.raw.HTMLElement
import scala.scalajs.js

import scala.collection.mutable.Buffer

import java.util.UUID

trait DomHook {
  private val elements = Buffer.empty[HTMLElement]

  def attach(element: HTMLElement): UUID = {
    val uuid = UUID.randomUUID()
    element.setAttribute("uuid", uuid.toString)
    elements += element
    uuid
  }

  def attachedElements(): js.Array[HTMLElement] = js.Array(elements.toSeq:_*)
} 
Example 15
Source File: NextIteratorSuite.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.FunSuite
import org.scalatest.Matchers

class NextIteratorSuite extends FunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext() = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 16
Source File: NextIteratorSuite.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
} 
Example 17
Source File: RunCliCmd.scala    From openwhisk   with Apache License 2.0
package common

import java.io.File

import scala.collection.JavaConverters._
import scala.collection.mutable.Buffer
import org.scalatest.Matchers
import TestUtils._
import scala.concurrent.duration._
import scala.collection.mutable

trait RunCliCmd extends Matchers {

  
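  // Note: the scraped snippet omits this trait's other members (baseCommand,
  // cli, runCmd, retry handling) that the code below relies on.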
  private def hideStr(str: String, hideThese: Seq[String]): String = {
    // Iterate through each string to hide, replacing it in the target string (str)
    hideThese.fold(str)((updatedStr, replaceThis) => updatedStr.replace(replaceThis, "XXXXX"))
  }

  private def reportFailure(args: Buffer[String], ec: Integer, rr: RunResult) = {
    val s = new StringBuilder()
    s.append(args.mkString(" ") + "\n")
    if (rr.stdout.nonEmpty) s.append(rr.stdout + "\n")
    if (rr.stderr.nonEmpty) s.append(rr.stderr)
    s.append("exit code:")
  }
}

object WskAdmin {
  val wskadmin = new RunCliCmd {
    override def baseCommand: mutable.Buffer[String] = WskAdmin.baseCommand
  }

  private val binDir = WhiskProperties.getFileRelativeToWhiskHome("bin")
  private val binaryName = "wskadmin"

  def exists = {
    val dir = binDir
    val exec = new File(dir, binaryName)
    assert(dir.exists, s"did not find $dir")
    assert(exec.exists, s"did not find $exec")
  }

  def baseCommand = {
    Buffer(WhiskProperties.python, new File(binDir, binaryName).toString)
  }

  def listKeys(namespace: String, pick: Integer = 1): List[(String, String)] = {
    wskadmin
      .cli(Seq("user", "list", namespace, "--pick", pick.toString))
      .stdout
      .split("\n")
      .map("""\s+""".r.split(_))
      .map(parts => (parts(0), parts(1)))
      .toList
  }
} 
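For reference, listKeys splits each stdout line on runs of whitespace and pairs the first two columns (the sample line below is made up):

val parts = """\s+""".r.split("someSubject    a1b2c3key")
// (parts(0), parts(1)) == ("someSubject", "a1b2c3key")
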
Example 18
Source File: RunCliCmdTests.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.common

import java.io.File

import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterEach, FlatSpec}
import org.scalatest.junit.JUnitRunner
import common.RunCliCmd
import common.TestUtils._

import scala.collection.mutable.Buffer

@RunWith(classOf[JUnitRunner])
class RunCliCmdTests extends FlatSpec with RunCliCmd with BeforeAndAfterEach {

  case class TestRunResult(code: Int) extends RunResult(code, "", "")
  val defaultRR = TestRunResult(0)

  override def baseCommand = Buffer.empty

  override def runCmd(expectedExitCode: Int,
                      dir: File,
                      env: Map[String, String],
                      fileStdin: Option[File],
                      params: Seq[String]): RunResult = {
    cmdCount += 1
    rr.getOrElse(defaultRR)
  }

  override def beforeEach() = {
    rr = None
    cmdCount = 0
  }

  var rr: Option[TestRunResult] = None // optional run result override per test
  var cmdCount = 0

  it should "retry commands that experience network errors" in {
    Seq(ANY_ERROR_EXIT, DONTCARE_EXIT, NETWORK_ERROR_EXIT).foreach { code =>
      cmdCount = 0

      rr = Some(TestRunResult(NETWORK_ERROR_EXIT))
      noException shouldBe thrownBy {
        cli(Seq.empty, expectedExitCode = code)
      }

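      // three retries on network error plus the original attempt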
      cmdCount shouldBe 3 + 1
    }
  }

  it should "not retry commands if retry is disabled" in {
    rr = Some(TestRunResult(NETWORK_ERROR_EXIT))
    noException shouldBe thrownBy {
      cli(Seq.empty, expectedExitCode = ANY_ERROR_EXIT, retriesOnNetworkError = 0)
    }

    cmdCount shouldBe 1
  }

  it should "not retry commands if failure is not retriable" in {
    Seq(MISUSE_EXIT, ERROR_EXIT, SUCCESS_EXIT).foreach { code =>
      cmdCount = 0

      rr = Some(TestRunResult(code))
      noException shouldBe thrownBy {
        cli(Seq.empty, expectedExitCode = DONTCARE_EXIT, retriesOnNetworkError = 3)
      }

      cmdCount shouldBe 1
    }
  }

} 
Example 19
Source File: StandardDefaultValuesSpec.scala    From sbt-avrohugger   with Apache License 2.0
import test._
import org.specs2.mutable.Specification
import java.io.File
import scala.collection.mutable.Buffer
import scala.collection.JavaConverters._

import org.apache.avro.file._
import org.apache.avro.generic._
import org.apache.avro._
class StandardDefaultValuesSpec extends Specification {

  "A case class with default values" should {
    "deserialize correctly" in {
      val record = DefaultTest()

      val enumSchemaString = """{"type":"enum","name":"DefaultEnum","symbols":["SPADES","DIAMONDS","CLUBS","HEARTS"]}"""
      val enumSchema = new Schema.Parser().parse(enumSchemaString)
      val genericEnum = new GenericData.EnumSymbol(enumSchema, record.suit.toString)
      
      val embeddedSchemaString = """{"type":"record","name":"Embedded","fields":[{"name":"inner","type":"int"}]}"""
      val embeddedSchema = new Schema.Parser().parse(embeddedSchemaString)
      val embeddedGenericRecord = new GenericData.Record(embeddedSchema)
      embeddedGenericRecord.put("inner", record.embedded.inner)

      val recordSchemaString = """{"type":"record","name":"DefaultTest","namespace":"test","fields":[{"name":"suit","type":{"type":"enum","name":"DefaultEnum","symbols":["SPADES","DIAMONDS","CLUBS","HEARTS"]},"default":"SPADES"},{"name":"number","type":"int","default":0},{"name":"str","type":"string","default":"str"},{"name":"optionString","type":["null","string"],"default":null},{"name":"optionStringValue","type":["string","null"],"default":"default"},{"name":"embedded","type":{"type":"record","name":"Embedded","fields":[{"name":"inner","type":"int"}]},"default":{"inner":1}},{"name":"defaultArray","type":{"type":"array","items":"int"},"default":[1,3,4,5]},{"name":"optionalEnum","type":["null","DefaultEnum"],"default":null},{"name":"defaultMap","type":{"type":"map","values":"string"},"default":{"Hello":"world","Merry":"Christmas"}},{"name":"byt","type":"bytes","default":"ÿ"}, {"name":"defaultEither","type": ["int", "string"],"default":2}, {"name":"defaultCoproduct","type": ["int", "string", "boolean"],"default":3}]}"""
      val recordSchema = new Schema.Parser().parse(recordSchemaString)
      
      val genericRecord = new GenericData.Record(recordSchema)
      genericRecord.put("suit", genericEnum)
    	genericRecord.put("number", record.number)
    	genericRecord.put("str", record.str)
    	genericRecord.put("optionString", record.optionString.getOrElse(null))
      genericRecord.put("optionStringValue", record.optionStringValue.getOrElse(null))
      genericRecord.put("embedded", embeddedGenericRecord)
      genericRecord.put("defaultArray",record.defaultArray.asJava)
      genericRecord.put("optionalEnum", record.optionalEnum.getOrElse(null))
      genericRecord.put("defaultMap", record.defaultMap.asJava)
      genericRecord.put("byt", java.nio.ByteBuffer.wrap(record.byt))
      genericRecord.put("defaultEither", record.defaultEither.fold(identity, identity))
      genericRecord.put("defaultCoproduct", record.defaultCoproduct.select[Int].getOrElse(0))
      val records = List(genericRecord)
      
      val fileName = s"${records.head.getClass.getName}"
      val fileEnding = "avro"
      val file = File.createTempFile(fileName, fileEnding)
      file.deleteOnExit()
      StandardTestUtil.write(file, records)

      var dummyRecord = new GenericDatumReader[GenericRecord]
      val schema = new DataFileReader(file, dummyRecord).getSchema
      val userDatumReader = new GenericDatumReader[GenericRecord](schema)
      val dataFileReader = new DataFileReader[GenericRecord](file, userDatumReader)
      // Adapted from: https://github.com/tackley/avrohugger-list-issue/blob/master/src/main/scala/net/tackley/Reader.scala
      // This isn't great scala, but represents how org.apache.avro.mapred.AvroInputFormat
      // (via org.apache.avro.file.DataFileStream) interacts with the StandardDatumReader.
      var sameRecord: GenericRecord = null.asInstanceOf[GenericRecord]
      while (dataFileReader.hasNext) {
        sameRecord = dataFileReader.next(sameRecord)
      }
      dataFileReader.close()

      sameRecord.get("suit").toString === DefaultEnum.SPADES.toString
      sameRecord.get("number") === 0
      sameRecord.get("str").toString === "str"
      sameRecord.get("optionString") === null
      sameRecord.get("optionStringValue").toString === "default"
      sameRecord.get("embedded").asInstanceOf[GenericRecord].get("inner") === 1
      sameRecord.get("defaultArray") === List(1,3,4,5).asJava
      sameRecord.get("optionalEnum") === null
      sameRecord.get("defaultMap").toString === "{Hello=world, Merry=Christmas}"
      sameRecord.get("byt") === java.nio.ByteBuffer.wrap("ÿ".getBytes)
      sameRecord.get("defaultEither") === 2
      sameRecord.get("defaultCoproduct") === 3
    }
  }
} 
Example 20
Source File: StandardDefaultValuesSpec.scala    From sbt-avrohugger   with Apache License 2.0
import test._
import org.specs2.mutable.Specification
import java.io.File
import scala.collection.mutable.Buffer
import scala.collection.JavaConverters._

import org.apache.avro.file._
import org.apache.avro.generic._
import org.apache.avro._
class StandardDefaultValuesSpec extends Specification {
  skipAll
  "A case class with default values" should {
    "deserialize correctly" in {
      val record = DefaultTest()

      val enumSchemaString = """{"type":"enum","name":"DefaultEnum","symbols":["SPADES","DIAMONDS","CLUBS","HEARTS"]}"""
      val enumSchema = new Schema.Parser().parse(enumSchemaString)
      val genericEnum = new GenericData.EnumSymbol(enumSchema, record.suit.toString)
      
      val embeddedSchemaString = """{"type":"record","name":"Embedded","fields":[{"name":"inner","type":"int"}]}"""
      val embeddedSchema = new Schema.Parser().parse(embeddedSchemaString)
      val embeddedGenericRecord = new GenericData.Record(embeddedSchema)
      embeddedGenericRecord.put("inner", record.embedded.inner)

      val recordSchemaString = """{"type":"record","name":"DefaultTest","namespace":"test","fields":[{"name":"suit","type":{"type":"enum","name":"DefaultEnum","symbols":["SPADES","DIAMONDS","CLUBS","HEARTS"]},"default":"SPADES"},{"name":"number","type":"int","default":0},{"name":"str","type":"string","default":"str"},{"name":"optionString","type":["null","string"],"default":null},{"name":"optionStringValue","type":["string","null"],"default":"default"},{"name":"embedded","type":{"type":"record","name":"Embedded","fields":[{"name":"inner","type":"int"}]},"default":{"inner":1}},{"name":"defaultArray","type":{"type":"array","items":"int"},"default":[1,3,4,5]},{"name":"optionalEnum","type":["null","DefaultEnum"],"default":null},{"name":"defaultMap","type":{"type":"map","values":"string"},"default":{"Hello":"world","Merry":"Christmas"}},{"name":"byt","type":"bytes","default":"ÿ"}, {"name":"defaultEither","type": ["int", "string"],"default":2}, {"name":"defaultCoproduct","type": ["int", "string", "boolean"],"default":3}]}"""
      val recordSchema = new Schema.Parser().parse(recordSchemaString)
      
      val genericRecord = new GenericData.Record(recordSchema)
      genericRecord.put("suit", genericEnum)
    	genericRecord.put("number", record.number)
    	genericRecord.put("str", record.str)
    	genericRecord.put("optionString", record.optionString.getOrElse(null))
      genericRecord.put("optionStringValue", record.optionStringValue.getOrElse(null))
      genericRecord.put("embedded", embeddedGenericRecord)
      genericRecord.put("defaultArray",record.defaultArray.asJava)
      genericRecord.put("optionalEnum", record.optionalEnum.getOrElse(null))
      genericRecord.put("defaultMap", record.defaultMap.asJava)
      genericRecord.put("byt", java.nio.ByteBuffer.wrap(record.byt))
      genericRecord.put("defaultEither", record.defaultEither.fold(identity, identity))
      genericRecord.put("defaultCoproduct", record.defaultCoproduct.select[Int].getOrElse(0))
      val records = List(genericRecord)
      
      val fileName = s"${records.head.getClass.getName}"
      val fileEnding = "avro"
      val file = File.createTempFile(fileName, fileEnding)
      file.deleteOnExit()
      StandardTestUtil.write(file, records)

      var dummyRecord = new GenericDatumReader[GenericRecord]
      val schema = new DataFileReader(file, dummyRecord).getSchema
      val userDatumReader = new GenericDatumReader[GenericRecord](schema)
      val dataFileReader = new DataFileReader[GenericRecord](file, userDatumReader)
      // Adapted from: https://github.com/tackley/avrohugger-list-issue/blob/master/src/main/scala/net/tackley/Reader.scala
      // This isn't great scala, but represents how org.apache.avro.mapred.AvroInputFormat
      // (via org.apache.avro.file.DataFileStream) interacts with the StandardDatumReader.
      var sameRecord: GenericRecord = null.asInstanceOf[GenericRecord]
      while (dataFileReader.hasNext) {
        sameRecord = dataFileReader.next(sameRecord)
      }
      dataFileReader.close()

      sameRecord.get("suit").toString === DefaultEnum.SPADES.toString
      sameRecord.get("number") === 0
      sameRecord.get("str").toString === "str"
      sameRecord.get("optionString") === null
      sameRecord.get("optionStringValue").toString === "default"
      sameRecord.get("embedded").asInstanceOf[GenericRecord].get("inner") === 1
      sameRecord.get("defaultArray") === List(1,3,4,5).asJava
      sameRecord.get("optionalEnum") === null
      sameRecord.get("defaultMap").toString === "{Hello=world, Merry=Christmas}"
      sameRecord.get("byt") === java.nio.ByteBuffer.wrap("ÿ".getBytes)
      sameRecord.get("defaultEither") === 2
      sameRecord.get("defaultCoproduct") === 3
    }
  }
} 
Example 21
Source File: Conversions.scala    From spark-riak-connector   with Apache License 2.0
package com.basho.riak.spark.util.python

import java.util.{List => JList}
import scala.reflect.ClassTag
import scala.collection.JavaConversions._
import scala.collection.mutable.Buffer

object Conversions {
  def asArray[T: ClassTag](c: Any): Array[T] = c match {
    case a: Array[T] => a
    case b: Buffer[T] => b.toArray
    case l: List[T] => l.toArray
    case l: JList[T] => asScalaBuffer(l).toArray
    case _ => throw new IllegalArgumentException(c.getClass() + " can't be converted to an Array")
  }

  def asSeq[T: ClassTag](c: Any): Seq[T] = c match {
    case a: Array[T] => a
    case b: Buffer[T] => b
    case l: List[T] => l
    case l: JList[T] => asScalaBuffer(l).toSeq
    case _ => throw new IllegalArgumentException(c.getClass() + " can't be converted to a Seq")
  }

} 
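A usage sketch: each of the calls below yields Array(1, 2, 3), regardless of the input collection type:

import scala.collection.mutable.Buffer

Conversions.asArray[Int](Buffer(1, 2, 3))
Conversions.asArray[Int](List(1, 2, 3))
Conversions.asArray[Int](java.util.Arrays.asList(1, 2, 3))
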
Example 22
Source File: NextIteratorSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.util

import java.util.NoSuchElementException

import scala.collection.mutable.Buffer

import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite

class NextIteratorSuite extends SparkFunSuite with Matchers {
  test("one iteration") {
    val i = new StubIterator(Buffer(1))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("two iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.hasNext should be (true)
    i.next should be (1)
    i.hasNext should be (true)
    i.next should be (2)
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("empty iteration") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    intercept[NoSuchElementException] { i.next() }
  }

  test("close is called once for empty iterations") {
    val i = new StubIterator(Buffer())
    i.hasNext should be (false)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  test("close is called once for non-empty iterations") {
    val i = new StubIterator(Buffer(1, 2))
    i.next should be (1)
    i.next should be (2)
    // close isn't called until we check for the next element
    i.closeCalled should be (0)
    i.hasNext should be (false)
    i.closeCalled should be (1)
    i.hasNext should be (false)
    i.closeCalled should be (1)
  }

  class StubIterator(ints: Buffer[Int])  extends NextIterator[Int] {
    var closeCalled = 0

    override def getNext(): Int = {
      if (ints.size == 0) {
        finished = true
        0
      } else {
        ints.remove(0)
      }
    }

    override def close() {
      closeCalled += 1
    }
  }
}