scala.collection.mutable.Set Scala Examples

The following examples show how to use scala.collection.mutable.Set. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.
Example 1
Source File: Converter.scala    From spatial   with MIT License 5 votes vote down vote up
package fringe.templates.math

import chisel3._
import fringe._
import fringe.utils.implicits._
import fringe.utils.getRetimed

import scala.collection.mutable.Set

/** Hardware converter between two fixed-point formats.
  *
  * Converts io.a, interpreted as a fixed-point number with d1 integer and f1
  * fractional bits (signed if s1), into a (s2, d2, f2)-format result on io.b,
  * delayed by `latency` cycles of retiming. `rounding` selects Truncate vs
  * Unbiased (PRNG-salted) rounding when fractional bits are dropped;
  * `saturating` selects Wrapping vs Saturating behaviour when integer bits
  * are dropped. io.expect_pos / io.expect_neg steer the saturation value.
  */
class fix2fixBox(s1: Boolean, d1: Int, f1: Int, s2: Boolean, d2: Int, f2: Int, rounding: RoundingMode, saturating: OverflowMode, latency: Int, myName: String) extends Module {
  val io = IO(new Bundle {
    val a = Input(UInt((d1+f1).W))
    val expect_pos = Input(Bool())
    val expect_neg = Input(Bool())
    val flow = Input(Bool())
    val b = Output(UInt((d2+f2).W))
  })

  val sign_extend = true // TODO: What cases do we want this false?

  // Widths are clamped to >= 1 because 0-width wires are not representable;
  // has_frac/has_dec remember whether the field really exists in the target.
  val has_frac = f2 > 0
  val has_dec = d2 > 0
  val up_frac = f2 max 1
  val up_dec = d2 max 1
  val tmp_frac = Wire(UInt(up_frac.W)); tmp_frac := DontCare
  val new_frac = Wire(UInt(up_frac.W)); new_frac := DontCare
  val new_dec = Wire(UInt(up_dec.W)); new_dec := DontCare
  if (!has_frac) tmp_frac := 0.U(1.W)
  if (!has_frac) new_frac := 0.U(1.W)
  if (!has_dec)  new_dec := 0.U(1.W)

  // Compute new frac part
  val shave_f = f1 - f2
  val shave_d = d1 - d2
  if (has_frac) {
    if (f2 < f1) { // shrink decimals
      rounding match {
        // Truncate: drop the low shave_f bits outright.
        case Truncate => tmp_frac := io.a(shave_f + f2 - 1, shave_f)
        // Unbiased: add a pseudo-random value spanning the dropped bits
        // before truncating (stochastic rounding). The PRNG seed is fixed
        // at elaboration time via scala.util.Random.
        case Unbiased =>
          val prng = Module(new PRNG(scala.math.abs(scala.util.Random.nextInt)))
          prng.io.en := true.B
          val salted = io.a + prng.io.output(shave_f - 1, 0)
          tmp_frac := salted(shave_f + f2 - 1, shave_f)
      }
    }
    else if (f2 > f1) { // expand decimals
      // Widen by zero-padding on the right (low side).
      val expand = f2 - f1
      if (f1 > 0) tmp_frac := util.Cat(io.a(f1 - 1, 0), 0.U(expand.W))
      else           tmp_frac := 0.U(expand.W)
    }
    else { // keep same
      tmp_frac := io.a(f2 - 1, 0)
    }
  }

  // Compute new dec part (concatenated with frac part from before)
  if (has_dec) {
    if (d2 < d1) { // shrink decimals
      saturating match {
        case Wrapping =>
          // Wrapping: simply drop the top shave_d integer bits.
          // dst.debug_overflow := (0 until shave_d).map{i => io.a(d1 + f1 - 1 - i) }.reduce{_||_}
          new_frac := tmp_frac
          new_dec := io.a(d2 + f1 - 1, f1)
        case Saturating =>
          val sign = io.a.msb
          // Overflow when the observed sign disagrees with the caller's
          // declared expectation of the result's sign.
          val overflow = (sign & io.expect_pos) | (!sign & io.expect_neg)
          // Value fits iff the shaved-off top bits are all 0s or all 1s,
          // i.e. they are pure sign extension.
          val not_saturated = (io.a(f1 + d1 - 1, f1 + d1 - 1 - shave_d) === 0.U(shave_d.W)) | (~io.a(f1 + d1 - 1, f1 + d1 - 1 - shave_d) === 0.U(shave_d.W))

          // NOTE(review): in both Muxes below the expect_neg branch and the
          // fall-through branch are identical, and the first Cat wraps a
          // single element — looks like placeholder logic; confirm upstream.
          val saturated_frac = Mux(io.expect_pos,
            util.Cat(util.Fill(up_frac, true.B)),
            Mux(io.expect_neg, 0.U(up_frac.W), 0.U(up_frac.W)))
          val saturated_dec = Mux(io.expect_pos,
            util.Cat(~(s2 | s1).B, util.Fill(up_dec - 1, true.B)),
            Mux(io.expect_neg, 1.U((d2).W) << (d2 - 1), 1.U((d2).W) << (d2 - 1)))

          // Zero passes through untouched; otherwise keep the shaved value
          // when it fits, else clamp to the saturation constant.
          new_frac := Mux(io.a === 0.U, 0.U, Mux(not_saturated & !overflow, tmp_frac, saturated_frac))
          new_dec := Mux(io.a === 0.U, 0.U, Mux(not_saturated & !overflow, io.a(d2 + f1 - 1, f1), saturated_dec))
      }
    }
    else if (d2 > d1) { // expand decimals
      // Widen on the left, sign-extending when the source format is signed.
      val expand = d2 - d1
      val sgn_extend: Bool = if (s1 & sign_extend) io.a.msb else false.B
      new_frac := tmp_frac
      if (d1 > 0) new_dec  := util.Cat(util.Fill(expand, sgn_extend), io.a(f1 + d1 - 1, f1))
    }
    else { // keep same
      new_frac := tmp_frac
      if (d1 > 0) new_dec := io.a(f1 + d1 - 1, f1)
      // (0 until d2).map{ i => number(i + f)*scala.math.pow(2,i).toInt.U }.reduce{_+_}
    }

  }

  // Reassemble integer and fractional parts and apply `latency` cycles of
  // retiming, gated by io.flow.
  if (has_dec & has_frac)       io.b := getRetimed(chisel3.util.Cat(new_dec, new_frac), latency, io.flow)
  else if (has_dec & !has_frac) io.b := getRetimed(new_dec, latency, io.flow)
  else if (!has_dec & has_frac) io.b := getRetimed(tmp_frac, latency, io.flow)
}
Example 2
Source File: depgraph.scala    From sbt-blockade   with Apache License 2.0 5 votes vote down vote up
//: ----------------------------------------------------------------------------
//: Copyright 2015 Johannes Rudolph
//:
//: Distributed under the Apache 2.0 License, please see the NOTICE
//: file in the root of the project for further details.
//: ----------------------------------------------------------------------------
package verizon.build

object depgraph {

  import java.io.File
  import sbt._
  import scala.collection.mutable.{HashMap, MultiMap, Set}
  import scala.language.reflectiveCalls

  /** Bridges sbt's update report into the ModuleGraph model defined below. */
  object SbtUpdateReport {

    // Structural type matching sbt's OrganizationArtifactReport across sbt
    // versions (hence the reflectiveCalls import above).
    type OrganizationArtifactReport = {
      def modules: Seq[ModuleReport]
    }

    /** Builds a ModuleGraph (nodes plus caller edges) from one sbt
      * configuration report, rooted at `rootInfo`. */
    def fromConfigurationReport(report: ConfigurationReport, rootInfo: sbt.ModuleID): ModuleGraph = {
      implicit def id(sbtId: sbt.ModuleID): ModuleId = ModuleId(sbtId.organization, sbtId.name, sbtId.revision)

      // One (module, incoming edges) pair per ModuleReport; the version that
      // won eviction is threaded through so evicted modules can record it.
      def moduleEdges(orgArt: OrganizationArtifactReport): Seq[(Module, Seq[Edge])] = {
        val chosenVersion = orgArt.modules.find(!_.evicted).map(_.module.revision)
        orgArt.modules.map(moduleEdge(chosenVersion))
      }

      def moduleEdge(chosenVersion: Option[String])(report: ModuleReport): (Module, Seq[Edge]) = {
        val evictedByVersion = if (report.evicted) chosenVersion else None
        // Prefer artifacts whose declared type is "jar"; fall back on the
        // file extension.
        val jarFile = report.artifacts.find(_._1.`type` == "jar").orElse(report.artifacts.find(_._1.extension == "jar")).map(_._2)
        (Module(
          id = report.module,
          license = report.licenses.headOption.map(_._1),
          evictedByVersion = evictedByVersion,
          jarFile = jarFile,
          error = report.problem
        ), report.callers.map(caller ⇒ Edge(caller.caller, report.module)))
      }

      val (nodes, edges) = report.details.flatMap(moduleEdges).unzip
      val root = Module(rootInfo)

      ModuleGraph(root +: nodes, edges.flatten)
    }
  }

  // A dependency edge: (dependent, dependee) module ids.
  type Edge = (ModuleId, ModuleId)

  def Edge(from: ModuleId, to: ModuleId): Edge = from -> to

  /** Coordinates identifying a module; idString renders "org:name:version". */
  case class ModuleId(organisation: String,
                      name: String,
                      version: String) {
    def idString: String = organisation + ":" + name + ":" + version
  }

  case class Module(id: ModuleId,
                    license: Option[String] = None,
                    extraInfo: String = "",
                    evictedByVersion: Option[String] = None,
                    jarFile: Option[File] = None,
                    error: Option[String] = None) {
    def hadError: Boolean = error.isDefined

    def isUsed: Boolean = !isEvicted

    def isEvicted: Boolean = evictedByVersion.isDefined
  }

  /** Dependency graph over Modules; lookup maps are built lazily on first use. */
  case class ModuleGraph(nodes: Seq[Module], edges: Seq[Edge]) {
    lazy val modules: Map[ModuleId, Module] =
      nodes.map(n ⇒ (n.id, n)).toMap

    def module(id: ModuleId): Module = modules(id)

    // id -> the modules it depends on
    lazy val dependencyMap: Map[ModuleId, Seq[Module]] =
      createMap(identity)

    // id -> the modules that depend on it (edges reversed)
    lazy val reverseDependencyMap: Map[ModuleId, Seq[Module]] =
      createMap { case (a, b) ⇒ (b, a) }

    /** Groups edge endpoints into a multimap after applying `bindingFor`.
      * NOTE(review): `mapValues` returns a lazy view here, so the sort is
      * recomputed per access — presumably acceptable at report size. */
    def createMap(bindingFor: ((ModuleId, ModuleId)) ⇒ (ModuleId, ModuleId)): Map[ModuleId, Seq[Module]] = {
      val m = new HashMap[ModuleId, Set[Module]] with MultiMap[ModuleId, Module]
      edges.foreach { entry ⇒
        val (f, t) = bindingFor(entry)
        m.addBinding(f, module(t))
      }
      m.toMap.mapValues(_.toSeq.sortBy(_.id.idString)).withDefaultValue(Nil)
    }

    // Modules that no edge points at, i.e. nothing depends on them.
    def roots: Seq[Module] =
      nodes.filter(n ⇒ !edges.exists(_._2 == n.id)).sortBy(_.id.idString)

    def isEmpty: Boolean = nodes.isEmpty
  }

}
Example 3
Source File: TimeStampedHashSet.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConversions
import scala.collection.mutable.Set

/** A mutable Set whose elements carry an insertion timestamp, so stale
  * entries can be purged in bulk via clearOldValues. Backed by a
  * ConcurrentHashMap (element -> insertion time in millis), so individual
  * mutations are thread-safe without external locking. */
private[spark] class TimeStampedHashSet[A] extends Set[A] {
  val internalMap = new ConcurrentHashMap[A, Long]()

  /** True if `key` is a member of this set.
    * Fixed: the original called `internalMap.contains(key)`, which on
    * java.util.concurrent.ConcurrentHashMap is a legacy alias for
    * `containsValue` — it compared `key` against the Long timestamps,
    * not the keys, and so was effectively always false. */
  def contains(key: A): Boolean = {
    internalMap.containsKey(key)
  }

  /** Iterates the elements (timestamps are not exposed). Wraps the Java
    * iterator directly instead of using the deprecated JavaConversions. */
  def iterator: Iterator[A] = {
    val keys = internalMap.keySet().iterator()
    new Iterator[A] {
      def hasNext: Boolean = keys.hasNext
      def next(): A = keys.next()
    }
  }

  /** Non-destructive add: copies into a fresh set, then inserts `elem`. */
  override def + (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet += elem
    newSet
  }

  /** Non-destructive remove: copies into a fresh set, then deletes `elem`. */
  override def - (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet -= elem
    newSet
  }

  /** In-place add; re-adding an element refreshes its timestamp. */
  override def += (key: A): this.type = {
    internalMap.put(key, currentTime)
    this
  }

  /** In-place remove; removing an absent element is a no-op. */
  override def -= (key: A): this.type = {
    internalMap.remove(key)
    this
  }

  override def empty: Set[A] = new TimeStampedHashSet[A]()

  override def size(): Int = internalMap.size()

  override def foreach[U](f: (A) => U): Unit = {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      f(iterator.next.getKey)
    }
  }

  /** Removes every entry inserted strictly before `threshTime` (millis).
    * Uses the entry-set iterator's remove so deletion is safe mid-iteration. */
  def clearOldValues(threshTime: Long) {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      val entry = iterator.next()
      if (entry.getValue < threshTime) {
        iterator.remove()
      }
    }
  }

  private def currentTime: Long = System.currentTimeMillis()
}
Example 4
Source File: Student.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.behavioral.mediator

import scala.collection.mutable.Map
import scala.collection.mutable.Set

/** Anything that can receive a broadcast message from the mediator.
  * Note: `notify(String)` overloads (does not override) AnyRef.notify(). */
trait Notifiable {
  def notify(message: String)
}

/** A student; message delivery is implemented as a line printed to stdout. */
case class Student(name: String, age: Int) extends Notifiable {
  override def notify(message: String): Unit = {
    val line = s"Student $name was notified with message: '$message'."
    System.out.println(line)
  }
}

// A named group students can join; case-class equality makes the name the identity.
case class Group(name: String)

/** Mediator over student/group membership: all interactions go through this
  * interface so Student and Group never reference each other directly. */
trait Mediator {
  def addStudentToGroup(student: Student, group: Group)

  def isStudentInGroup(student: Student, group: Group): Boolean

  def removeStudentFromGroup(student: Student, group: Group)

  def getStudentsInGroup(group: Group): List[Student]

  def getGroupsForStudent(student: Student): List[Group]

  /** Delivers `message` to every student currently in `group`. */
  def notifyStudentsInGroup(group: Group, message: String)
}

/** Concrete Mediator: keeps two mutable, mutually-consistent indexes so both
  * directions of the student/group relation are cheap to query. */
class School extends Mediator {
  val studentsToGroups: Map[Student, Set[Group]] = Map()
  val groupsToStudents: Map[Group, Set[Student]] = Map()

  override def addStudentToGroup(student: Student, group: Group): Unit = {
    // Create index entries on first sight, then record membership on both sides.
    val groupsOfStudent = studentsToGroups.getOrElseUpdate(student, Set())
    groupsOfStudent += group
    val studentsOfGroup = groupsToStudents.getOrElseUpdate(group, Set())
    studentsOfGroup += student
  }

  override def isStudentInGroup(student: Student, group: Group): Boolean = {
    // Membership counts only when both indexes agree.
    val knownToGroup = groupsToStudents.getOrElse(group, Set()).contains(student)
    val knownToStudent = studentsToGroups.getOrElse(student, Set()).contains(group)
    knownToGroup && knownToStudent
  }

  override def getStudentsInGroup(group: Group): List[Student] =
    groupsToStudents.get(group) match {
      case Some(students) => students.toList
      case None           => Nil
    }

  override def getGroupsForStudent(student: Student): List[Group] =
    studentsToGroups.get(student) match {
      case Some(groups) => groups.toList
      case None         => Nil
    }

  override def notifyStudentsInGroup(group: Group, message: String): Unit =
    for {
      students <- groupsToStudents.get(group)
      student  <- students
    } student.notify(message)

  override def removeStudentFromGroup(student: Student, group: Group): Unit = {
    // Absent keys are a no-op: there is nothing to remove on that side.
    studentsToGroups.get(student).foreach(_ -= group)
    groupsToStudents.get(group).foreach(_ -= student)
  }
}

/** Demo driver: wires students into groups via the School mediator and
  * exercises notification, lookup and removal. */
object SchoolExample {
  def main(args: Array[String]): Unit = {
    val school = new School
    // participants
    val ivan = Student("Ivan", 26)
    val maria = Student("Maria", 26)
    val john = Student("John", 25)
    // groups
    val designPatterns = Group("Scala design patterns")
    val databases = Group("Databases")
    val cloud = Group("Cloud computing")

    // Enroll everyone; order matters for the printed set contents below.
    val memberships = Seq(
      ivan -> designPatterns, ivan -> databases, ivan -> cloud,
      maria -> designPatterns, maria -> cloud,
      john -> designPatterns, john -> databases
    )
    memberships.foreach { case (student, group) => school.addStudentToGroup(student, group) }

    // broadcast to one group
    school.notifyStudentsInGroup(designPatterns, "Design patterns in Scala are amazing!")

    // inspect, remove, inspect again
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")
    school.removeStudentFromGroup(john, databases)
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")

    // list a group's members
    System.out.println(s"Students in $designPatterns are ${school.getStudentsInGroup(designPatterns)}")
  }
}
Example 5
Source File: Student.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.behavioral.mediator

import scala.collection.mutable.Map
import scala.collection.mutable.Set

/** Anything that can receive a broadcast message from the mediator.
  * Note: `notify(String)` overloads (does not override) AnyRef.notify(). */
trait Notifiable {
  def notify(message: String)
}

/** A student; message delivery is implemented as a line printed to stdout. */
case class Student(name: String, age: Int) extends Notifiable {
  override def notify(message: String): Unit = {
    val line = s"Student $name was notified with message: '$message'."
    System.out.println(line)
  }
}

// A named group students can join; case-class equality makes the name the identity.
case class Group(name: String)

/** Mediator over student/group membership: all interactions go through this
  * interface so Student and Group never reference each other directly. */
trait Mediator {
  def addStudentToGroup(student: Student, group: Group)

  def isStudentInGroup(student: Student, group: Group): Boolean

  def removeStudentFromGroup(student: Student, group: Group)

  def getStudentsInGroup(group: Group): List[Student]

  def getGroupsForStudent(student: Student): List[Group]

  /** Delivers `message` to every student currently in `group`. */
  def notifyStudentsInGroup(group: Group, message: String)
}

/** Concrete Mediator: keeps two mutable, mutually-consistent indexes so both
  * directions of the student/group relation are cheap to query. */
class School extends Mediator {
  val studentsToGroups: Map[Student, Set[Group]] = Map()
  val groupsToStudents: Map[Group, Set[Student]] = Map()

  override def addStudentToGroup(student: Student, group: Group): Unit = {
    // Create index entries on first sight, then record membership on both sides.
    val groupsOfStudent = studentsToGroups.getOrElseUpdate(student, Set())
    groupsOfStudent += group
    val studentsOfGroup = groupsToStudents.getOrElseUpdate(group, Set())
    studentsOfGroup += student
  }

  override def isStudentInGroup(student: Student, group: Group): Boolean = {
    // Membership counts only when both indexes agree.
    val knownToGroup = groupsToStudents.getOrElse(group, Set()).contains(student)
    val knownToStudent = studentsToGroups.getOrElse(student, Set()).contains(group)
    knownToGroup && knownToStudent
  }

  override def getStudentsInGroup(group: Group): List[Student] =
    groupsToStudents.get(group) match {
      case Some(students) => students.toList
      case None           => Nil
    }

  override def getGroupsForStudent(student: Student): List[Group] =
    studentsToGroups.get(student) match {
      case Some(groups) => groups.toList
      case None         => Nil
    }

  override def notifyStudentsInGroup(group: Group, message: String): Unit =
    for {
      students <- groupsToStudents.get(group)
      student  <- students
    } student.notify(message)

  override def removeStudentFromGroup(student: Student, group: Group): Unit = {
    // Absent keys are a no-op: there is nothing to remove on that side.
    studentsToGroups.get(student).foreach(_ -= group)
    groupsToStudents.get(group).foreach(_ -= student)
  }
}

/** Demo driver: wires students into groups via the School mediator and
  * exercises notification, lookup and removal. */
object SchoolExample {
  def main(args: Array[String]): Unit = {
    val school = new School
    // participants
    val ivan = Student("Ivan", 26)
    val maria = Student("Maria", 26)
    val john = Student("John", 25)
    // groups
    val designPatterns = Group("Scala design patterns")
    val databases = Group("Databases")
    val cloud = Group("Cloud computing")

    // Enroll everyone; order matters for the printed set contents below.
    val memberships = Seq(
      ivan -> designPatterns, ivan -> databases, ivan -> cloud,
      maria -> designPatterns, maria -> cloud,
      john -> designPatterns, john -> databases
    )
    memberships.foreach { case (student, group) => school.addStudentToGroup(student, group) }

    // broadcast to one group
    school.notifyStudentsInGroup(designPatterns, "Design patterns in Scala are amazing!")

    // inspect, remove, inspect again
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")
    school.removeStudentFromGroup(john, databases)
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")

    // list a group's members
    System.out.println(s"Students in $designPatterns are ${school.getStudentsInGroup(designPatterns)}")
  }
}
Example 6
Source File: Student.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.behavioral.mediator

import scala.collection.mutable.Map
import scala.collection.mutable.Set

/** Anything that can receive a broadcast message from the mediator.
  * Note: `notify(String)` overloads (does not override) AnyRef.notify(). */
trait Notifiable {
  def notify(message: String)
}

/** A student; message delivery is implemented as a line printed to stdout. */
case class Student(name: String, age: Int) extends Notifiable {
  override def notify(message: String): Unit = {
    val line = s"Student $name was notified with message: '$message'."
    System.out.println(line)
  }
}

// A named group students can join; case-class equality makes the name the identity.
case class Group(name: String)

/** Mediator over student/group membership: all interactions go through this
  * interface so Student and Group never reference each other directly. */
trait Mediator {
  def addStudentToGroup(student: Student, group: Group)

  def isStudentInGroup(student: Student, group: Group): Boolean

  def removeStudentFromGroup(student: Student, group: Group)

  def getStudentsInGroup(group: Group): List[Student]

  def getGroupsForStudent(student: Student): List[Group]

  /** Delivers `message` to every student currently in `group`. */
  def notifyStudentsInGroup(group: Group, message: String)
}

/** Concrete Mediator: keeps two mutable, mutually-consistent indexes so both
  * directions of the student/group relation are cheap to query. */
class School extends Mediator {
  val studentsToGroups: Map[Student, Set[Group]] = Map()
  val groupsToStudents: Map[Group, Set[Student]] = Map()

  override def addStudentToGroup(student: Student, group: Group): Unit = {
    // Create index entries on first sight, then record membership on both sides.
    val groupsOfStudent = studentsToGroups.getOrElseUpdate(student, Set())
    groupsOfStudent += group
    val studentsOfGroup = groupsToStudents.getOrElseUpdate(group, Set())
    studentsOfGroup += student
  }

  override def isStudentInGroup(student: Student, group: Group): Boolean = {
    // Membership counts only when both indexes agree.
    val knownToGroup = groupsToStudents.getOrElse(group, Set()).contains(student)
    val knownToStudent = studentsToGroups.getOrElse(student, Set()).contains(group)
    knownToGroup && knownToStudent
  }

  override def getStudentsInGroup(group: Group): List[Student] =
    groupsToStudents.get(group) match {
      case Some(students) => students.toList
      case None           => Nil
    }

  override def getGroupsForStudent(student: Student): List[Group] =
    studentsToGroups.get(student) match {
      case Some(groups) => groups.toList
      case None         => Nil
    }

  override def notifyStudentsInGroup(group: Group, message: String): Unit =
    for {
      students <- groupsToStudents.get(group)
      student  <- students
    } student.notify(message)

  override def removeStudentFromGroup(student: Student, group: Group): Unit = {
    // Absent keys are a no-op: there is nothing to remove on that side.
    studentsToGroups.get(student).foreach(_ -= group)
    groupsToStudents.get(group).foreach(_ -= student)
  }
}

/** Demo driver: wires students into groups via the School mediator and
  * exercises notification, lookup and removal. */
object SchoolExample {
  def main(args: Array[String]): Unit = {
    val school = new School
    // participants
    val ivan = Student("Ivan", 26)
    val maria = Student("Maria", 26)
    val john = Student("John", 25)
    // groups
    val designPatterns = Group("Scala design patterns")
    val databases = Group("Databases")
    val cloud = Group("Cloud computing")

    // Enroll everyone; order matters for the printed set contents below.
    val memberships = Seq(
      ivan -> designPatterns, ivan -> databases, ivan -> cloud,
      maria -> designPatterns, maria -> cloud,
      john -> designPatterns, john -> databases
    )
    memberships.foreach { case (student, group) => school.addStudentToGroup(student, group) }

    // broadcast to one group
    school.notifyStudentsInGroup(designPatterns, "Design patterns in Scala are amazing!")

    // inspect, remove, inspect again
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")
    school.removeStudentFromGroup(john, databases)
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")

    // list a group's members
    System.out.println(s"Students in $designPatterns are ${school.getStudentsInGroup(designPatterns)}")
  }
}
Example 7
Source File: Student.scala    From Scala-Design-Patterns-Second-Edition   with MIT License 5 votes vote down vote up
package com.ivan.nikolov.behavioral.mediator

import scala.collection.mutable.Map
import scala.collection.mutable.Set

/** Anything that can receive a broadcast message from the mediator.
  * Note: `notify(String)` overloads (does not override) AnyRef.notify(). */
trait Notifiable {
  def notify(message: String)
}

/** A student; message delivery is implemented as a line printed to stdout. */
case class Student(name: String, age: Int) extends Notifiable {
  override def notify(message: String): Unit = {
    val line = s"Student $name was notified with message: '$message'."
    System.out.println(line)
  }
}

// A named group students can join; case-class equality makes the name the identity.
case class Group(name: String)

/** Mediator over student/group membership: all interactions go through this
  * interface so Student and Group never reference each other directly. */
trait Mediator {
  def addStudentToGroup(student: Student, group: Group)

  def isStudentInGroup(student: Student, group: Group): Boolean

  def removeStudentFromGroup(student: Student, group: Group)

  def getStudentsInGroup(group: Group): List[Student]

  def getGroupsForStudent(student: Student): List[Group]

  /** Delivers `message` to every student currently in `group`. */
  def notifyStudentsInGroup(group: Group, message: String)
}

/** Concrete Mediator: keeps two mutable, mutually-consistent indexes so both
  * directions of the student/group relation are cheap to query. */
class School extends Mediator {
  val studentsToGroups: Map[Student, Set[Group]] = Map()
  val groupsToStudents: Map[Group, Set[Student]] = Map()

  override def addStudentToGroup(student: Student, group: Group): Unit = {
    // Create index entries on first sight, then record membership on both sides.
    val groupsOfStudent = studentsToGroups.getOrElseUpdate(student, Set())
    groupsOfStudent += group
    val studentsOfGroup = groupsToStudents.getOrElseUpdate(group, Set())
    studentsOfGroup += student
  }

  override def isStudentInGroup(student: Student, group: Group): Boolean = {
    // Membership counts only when both indexes agree.
    val knownToGroup = groupsToStudents.getOrElse(group, Set()).contains(student)
    val knownToStudent = studentsToGroups.getOrElse(student, Set()).contains(group)
    knownToGroup && knownToStudent
  }

  override def getStudentsInGroup(group: Group): List[Student] =
    groupsToStudents.get(group) match {
      case Some(students) => students.toList
      case None           => Nil
    }

  override def getGroupsForStudent(student: Student): List[Group] =
    studentsToGroups.get(student) match {
      case Some(groups) => groups.toList
      case None         => Nil
    }

  override def notifyStudentsInGroup(group: Group, message: String): Unit =
    for {
      students <- groupsToStudents.get(group)
      student  <- students
    } student.notify(message)

  override def removeStudentFromGroup(student: Student, group: Group): Unit = {
    // Absent keys are a no-op: there is nothing to remove on that side.
    studentsToGroups.get(student).foreach(_ -= group)
    groupsToStudents.get(group).foreach(_ -= student)
  }
}

/** Demo driver: wires students into groups via the School mediator and
  * exercises notification, lookup and removal. */
object SchoolExample {
  def main(args: Array[String]): Unit = {
    val school = new School
    // participants
    val ivan = Student("Ivan", 26)
    val maria = Student("Maria", 26)
    val john = Student("John", 25)
    // groups
    val designPatterns = Group("Scala design patterns")
    val databases = Group("Databases")
    val cloud = Group("Cloud computing")

    // Enroll everyone; order matters for the printed set contents below.
    val memberships = Seq(
      ivan -> designPatterns, ivan -> databases, ivan -> cloud,
      maria -> designPatterns, maria -> cloud,
      john -> designPatterns, john -> databases
    )
    memberships.foreach { case (student, group) => school.addStudentToGroup(student, group) }

    // broadcast to one group
    school.notifyStudentsInGroup(designPatterns, "Design patterns in Scala are amazing!")

    // inspect, remove, inspect again
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")
    school.removeStudentFromGroup(john, databases)
    System.out.println(s"$john is in groups: ${school.getGroupsForStudent(john)}")

    // list a group's members
    System.out.println(s"Students in $designPatterns are ${school.getStudentsInGroup(designPatterns)}")
  }
}
Example 8
Source File: TimeStampedHashSet.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConversions
import scala.collection.mutable.Set

/** A mutable Set whose elements carry an insertion timestamp, so stale
  * entries can be purged in bulk via clearOldValues. Backed by a
  * ConcurrentHashMap (element -> insertion time in millis), so individual
  * mutations are thread-safe without external locking. */
private[spark] class TimeStampedHashSet[A] extends Set[A] {
  val internalMap = new ConcurrentHashMap[A, Long]()

  /** True if `key` is a member of this set.
    * Fixed: the original called `internalMap.contains(key)`, which on
    * java.util.concurrent.ConcurrentHashMap is a legacy alias for
    * `containsValue` — it compared `key` against the Long timestamps,
    * not the keys, and so was effectively always false. */
  def contains(key: A): Boolean = {
    internalMap.containsKey(key)
  }

  /** Iterates the elements (timestamps are not exposed). Wraps the Java
    * iterator directly instead of using the deprecated JavaConversions. */
  def iterator: Iterator[A] = {
    val keys = internalMap.keySet().iterator()
    new Iterator[A] {
      def hasNext: Boolean = keys.hasNext
      def next(): A = keys.next()
    }
  }

  /** Non-destructive add: copies into a fresh set, then inserts `elem`. */
  override def + (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet += elem
    newSet
  }

  /** Non-destructive remove: copies into a fresh set, then deletes `elem`. */
  override def - (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet -= elem
    newSet
  }

  /** In-place add; re-adding an element refreshes its timestamp. */
  override def += (key: A): this.type = {
    internalMap.put(key, currentTime)
    this
  }

  /** In-place remove; removing an absent element is a no-op. */
  override def -= (key: A): this.type = {
    internalMap.remove(key)
    this
  }

  override def empty: Set[A] = new TimeStampedHashSet[A]()

  override def size(): Int = internalMap.size()

  override def foreach[U](f: (A) => U): Unit = {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      f(iterator.next.getKey)
    }
  }

  /** Removes every entry inserted strictly before `threshTime` (millis).
    * Uses the entry-set iterator's remove so deletion is safe mid-iteration. */
  def clearOldValues(threshTime: Long) {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      val entry = iterator.next()
      if (entry.getValue < threshTime) {
        iterator.remove()
      }
    }
  }

  private def currentTime: Long = System.currentTimeMillis()
}
Example 9
Source File: MutableSet.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package scalaDemo


/** Contrasts in-place mutation of a mutable Set with the copy-on-add
  * behaviour of an immutable HashSet. */
object MutableSet {
  def main(args: Array[String]): Unit = {
    // Mutable set: += mutates the instance itself.
    import scala.collection.mutable
    val movieSet = mutable.Set("Hitch", "Poltergeist")
    movieSet += "Shrek"
    println(movieSet)

    // Immutable set: + yields a brand-new set; the original is unchanged.
    import scala.collection.immutable.HashSet
    val hashSet = HashSet("Tomatoes", "Chilies")
    println(hashSet + "Coriander")
  }
}
Example 10
Source File: TimeStampedHashSet.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util

import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConversions
import scala.collection.mutable.Set

/** A mutable Set whose elements carry an insertion timestamp, so stale
  * entries can be purged in bulk via clearOldValues. Backed by a
  * ConcurrentHashMap (element -> insertion time in millis), so individual
  * mutations are thread-safe without external locking. */
private[spark] class TimeStampedHashSet[A] extends Set[A] {
  val internalMap = new ConcurrentHashMap[A, Long]()

  /** True if `key` is a member of this set.
    * Fixed: the original called `internalMap.contains(key)`, which on
    * java.util.concurrent.ConcurrentHashMap is a legacy alias for
    * `containsValue` — it compared `key` against the Long timestamps,
    * not the keys, and so was effectively always false. */
  def contains(key: A): Boolean = {
    internalMap.containsKey(key)
  }

  /** Iterates the elements (timestamps are not exposed). Wraps the Java
    * iterator directly instead of using the deprecated JavaConversions. */
  def iterator: Iterator[A] = {
    val keys = internalMap.keySet().iterator()
    new Iterator[A] {
      def hasNext: Boolean = keys.hasNext
      def next(): A = keys.next()
    }
  }

  /** Non-destructive add: copies into a fresh set, then inserts `elem`. */
  override def + (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet += elem
    newSet
  }

  /** Non-destructive remove: copies into a fresh set, then deletes `elem`. */
  override def - (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet -= elem
    newSet
  }

  // this.type: returns the same instance for chained in-place updates.
  /** In-place add; re-adding an element refreshes its timestamp. */
  override def += (key: A): this.type = {
    internalMap.put(key, currentTime)
    this
  }

  /** In-place remove; removing an absent element is a no-op. */
  override def -= (key: A): this.type = {
    internalMap.remove(key)
    this
  }

  override def empty: Set[A] = new TimeStampedHashSet[A]()

  override def size(): Int = internalMap.size()

  override def foreach[U](f: (A) => U): Unit = {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      f(iterator.next.getKey)
    }
  }

  /** Removes every entry inserted strictly before `threshTime` (millis).
    * Uses the entry-set iterator's remove so deletion is safe mid-iteration. */
  def clearOldValues(threshTime: Long) {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      val entry = iterator.next()
      if (entry.getValue < threshTime) {
        iterator.remove()
      }
    }
  }

  private def currentTime: Long = System.currentTimeMillis()
}
Example 11
Source File: LocalityPlacementStrategySuite.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.yarn

import scala.collection.JavaConverters._
import scala.collection.mutable.{HashMap, HashSet, Set}

import org.apache.hadoop.yarn.api.records._
import org.apache.hadoop.yarn.conf.YarnConfiguration
import org.mockito.Mockito._

import org.apache.spark.{SparkConf, SparkFunSuite}

class LocalityPlacementStrategySuite extends SparkFunSuite {

  test("handle large number of containers and tasks (SPARK-18750)") {
    // Run the test in a thread with a small stack size, since the original issue
    // surfaced as a StackOverflowError.
    var error: Throwable = null

    // Capture any failure from the worker thread so it can be re-asserted on
    // the test thread (an exception on another thread would not fail the test).
    val runnable = new Runnable() {
      override def run(): Unit = try {
        runTest()
      } catch {
        case e: Throwable => error = e
      }
    }

    // 32 KB stack: small enough that deep recursion would overflow again
    // if the SPARK-18750 regression reappeared.
    val thread = new Thread(new ThreadGroup("test"), runnable, "test-thread", 32 * 1024)
    thread.start()
    thread.join()

    assert(error === null)
  }

  // Builds a large synthetic allocation problem and runs the placement
  // strategy on it; any error propagates to the caller via the thread above.
  private def runTest(): Unit = {
    val yarnConf = new YarnConfiguration()

    // The numbers below have been chosen to balance being large enough to replicate the
    // original issue while not taking too long to run when the issue is fixed. The main
    // goal is to create enough requests for localized containers (so there should be many
    // tasks on several hosts that have no allocated containers).

    val resource = Resource.newInstance(8 * 1024, 4)
    val strategy = new LocalityPreferredContainerPlacementStrategy(new SparkConf(),
      yarnConf, resource, new MockResolver())

    val totalTasks = 32 * 1024
    val totalContainers = totalTasks / 16
    val totalHosts = totalContainers / 16

    // A single mocked ContainerId instance is reused for every container.
    val mockId = mock(classOf[ContainerId])
    val hosts = (1 to totalHosts).map { i => (s"host_$i", totalTasks % i) }.toMap
    val containers = (1 to totalContainers).map { i => mockId }
    val count = containers.size / hosts.size / 2

    // Pre-assign containers to only half the hosts, leaving the rest with
    // no allocated containers (the interesting case for locality requests).
    val hostToContainerMap = new HashMap[String, Set[ContainerId]]()
    hosts.keys.take(hosts.size / 2).zipWithIndex.foreach { case (host, i) =>
      val hostContainers = new HashSet[ContainerId]()
      containers.drop(count * i).take(i).foreach { c => hostContainers += c }
      hostToContainerMap(host) = hostContainers
    }

    strategy.localityOfRequestedContainers(containers.size * 2, totalTasks, hosts,
      hostToContainerMap, Nil)
  }

}
Example 12
Source File: CachedRDDManager.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package edu.ucla.cs.wis.bigdatalog.spark.execution.recursion

import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel

import scala.collection.mutable.{HashMap, HashSet, Set}

/** Book-keeps RDDs persisted per recursion iteration so RDDs from old
  * iterations can be unpersisted in bulk once no longer needed. */
class CachedRDDManager(defaultStorageLevel: StorageLevel)
  extends Logging with Serializable {

  // iteration number -> RDDs persisted during that iteration
  val iterationToRDDMap = new HashMap[Int, HashSet[RDD[_]]]
  var currentIteration : Int = 0

  def persist(rdd: RDD[_]): Unit = {
    persist(rdd, false)
  }

  /** Persists `rdd` at the default storage level, records it under the
    * current iteration, and optionally memory-checkpoints it. */
  def persist(rdd: RDD[_], doMemoryCheckpoint: Boolean): Unit = {
    iterationToRDDMap.getOrElseUpdate(currentIteration, new HashSet[RDD[_]]).add(rdd)
    rdd.persist(defaultStorageLevel)

    if (doMemoryCheckpoint)
      rdd.memoryCheckpoint()
  }

  /** Unpersists all RDDs recorded `iterationsBackToRemove` iterations before
    * the current one, then advances the iteration counter. */
  def cleanUpIteration(iterationsBackToRemove: Int = 2) = {
    val start = System.currentTimeMillis()
    if (currentIteration >= iterationsBackToRemove) {
      val iterationId = currentIteration - iterationsBackToRemove
      if (iterationToRDDMap.contains(iterationId)) {
        val rdds: HashSet[RDD[_]] = iterationToRDDMap.remove(iterationId).get
        if (rdds.nonEmpty)
          logInfo("Unpersisting "+rdds.size+" rdds for iteration " + iterationId)
        rdds.foreach(rdd => rdd.unpersist(false))
      }
    }
    logInfo("CleanUpIteration took " + (System.currentTimeMillis() - start) + " ms")
    currentIteration += 1
  }

  /** Unpersists and forgets all RDDs recorded for a specific iteration. */
  def cleanUpIterationById(iterationId: Int) = {
    if (iterationToRDDMap.contains(iterationId)) {
      val rdds: HashSet[RDD[_]] = iterationToRDDMap.remove(iterationId).get
      rdds.foreach(rdd => rdd.unpersist(false))
    }
  }

  def incrementIteration() { currentIteration += 1}

  // Drops all bookkeeping without unpersisting anything.
  def clear() = {
    iterationToRDDMap.clear()
  }

  /** Unpersists every tracked RDD except those in `remainCached`, then
    * clears the bookkeeping map. */
  def clear(remainCached: Seq[RDD[_]]) = {
    iterationToRDDMap.keySet.foreach(key => logInfo("key: " + key + " value: " + iterationToRDDMap.get(key)))

    iterationToRDDMap.keySet
      .foreach(key => iterationToRDDMap.get(key)
      .foreach(value => value.foreach(item => {if (!remainCached.contains(item)) item.unpersist(false)})))

    iterationToRDDMap.clear()
  }

  /** Unpersists the given RDDs and removes them from whichever iteration
    * recorded them; map access is synchronized against concurrent callers. */
  def unpersist(rdds: Set[RDD[_]]) = {
    for (rdd <- rdds) {
      iterationToRDDMap.synchronized {
        // rdd should only be in 1 iteration
        val iterations = iterationToRDDMap.filter(x => x._2.contains(rdd))
        if (iterations.nonEmpty) {
          val iteration = iterations.head
          iteration._2.remove(rdd)
          rdd.unpersist(false)
          if (iteration._2.isEmpty)
            iterationToRDDMap.remove(iteration._1)
        }
      }
    }
  }

  // Debug dump: one "iteration:rdd" line per tracked RDD, iterations ascending.
  override def toString = {
    val output = new StringBuilder
    iterationToRDDMap.keySet.toSeq.sorted
      .foreach(iteration => {
        val rdds = iterationToRDDMap.get(iteration)
        rdds.foreach(rdd => output.append(iteration + ":" + rdd + "\n"))
      })
    output.toString()
  }
}
Example 13
Source File: FixedPointJobDefinition.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.scheduler.fixedpoint

import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD

import scala.collection.mutable.{HashSet, HashMap, Set}

/**
 * Describes one fixed-point job: how to set up an iteration, how to clean it
 * up, and which RDDs take part in the evaluation on the workers.
 *
 * @param setupIteration   builds the RDD for the next iteration from the
 *                         current one.
 * @param cleanupIteration releases resources of a finished iteration,
 *                         identified by its id.
 */
class FixedPointJobDefinition(val setupIteration: (FixedPointJobDefinition, RDD[_]) => RDD[_],
                              val cleanupIteration: (Int) => Unit) {
  // Predicate deciding whether the fixed point has been reached; installed
  // later via fixedPointEvaluator(...). null until then.
  var _fixedPointEvaluator: (TaskContext, Iterator[_]) => Boolean = null
  // Result RDD of the evaluation; null until assigned.
  var finalRDD: RDD[_] = null
  var rddIds = Array.empty[Int] // for all and delta rdd id for FixedPointResultTask execution on worker

  /** Installs the fixed-point evaluator. */
  def fixedPointEvaluator(fixedPointEvaluator: (TaskContext, Iterator[_]) => Boolean): Unit = {
    _fixedPointEvaluator = fixedPointEvaluator
  }

  /** The installed evaluator, with its result type widened for the caller. */
  def getfixedPointEvaluator = _fixedPointEvaluator.asInstanceOf[(TaskContext, Iterator[_]) => _]

  /** The RDD produced by the evaluation (null until set). */
  def getFinalRDD: RDD[_] = finalRDD

  /** Records the four RDD ids used by FixedPointResultTask on the workers. */
  def setRDDIds(newAllRDDId: Int,
                oldAllRDDId: Int,
                newDeltaPrimeRDDId: Int,
                oldDeltaPrimeRDDId: Int): Unit =
    rddIds = Array(newAllRDDId, oldAllRDDId, newDeltaPrimeRDDId, oldDeltaPrimeRDDId)
}
Example 14
Source File: TimeStampedHashSet.scala — from the BigDatalog project (Apache License 2.0)
package org.apache.spark.util

import java.util.concurrent.ConcurrentHashMap

import scala.collection.JavaConverters._
import scala.collection.mutable.Set

/**
 * A mutable Set whose elements carry an insertion timestamp, allowing entries
 * older than a threshold to be purged via [[clearOldValues]]. Backed by a
 * ConcurrentHashMap, so individual operations are thread-safe.
 */
private[spark] class TimeStampedHashSet[A] extends Set[A] {
  // element -> insertion time in epoch millis
  val internalMap = new ConcurrentHashMap[A, Long]()

  /** Membership test. */
  def contains(key: A): Boolean = {
    // BUG FIX: ConcurrentHashMap.contains(Object) is a legacy alias for
    // containsValue — it compared `key` against the Long timestamps and so
    // always returned false. containsKey is the correct membership test.
    internalMap.containsKey(key)
  }

  /** Iterates the elements (timestamps are not exposed). */
  def iterator: Iterator[A] = {
    val jIterator = internalMap.entrySet().iterator()
    jIterator.asScala.map(_.getKey)
  }

  /** Non-destructive add: returns a fresh set containing `elem` too. */
  override def + (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet += elem
    newSet
  }

  /** Non-destructive remove: returns a fresh set without `elem`. */
  override def - (elem: A): Set[A] = {
    val newSet = new TimeStampedHashSet[A]
    newSet ++= this
    newSet -= elem
    newSet
  }

  /** In-place add; records the current time as the element's timestamp. */
  override def += (key: A): this.type = {
    internalMap.put(key, currentTime)
    this
  }

  /** In-place remove. */
  override def -= (key: A): this.type = {
    internalMap.remove(key)
    this
  }

  override def empty: Set[A] = new TimeStampedHashSet[A]()

  override def size(): Int = internalMap.size()

  override def foreach[U](f: (A) => U): Unit = {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      f(iterator.next.getKey)
    }
  }

  /** Removes every element inserted strictly before `threshTime` (millis). */
  def clearOldValues(threshTime: Long) {
    val iterator = internalMap.entrySet().iterator()
    while(iterator.hasNext) {
      val entry = iterator.next()
      if (entry.getValue < threshTime) {
        // remove via the iterator so the traversal stays valid
        iterator.remove()
      }
    }
  }

  private def currentTime: Long = System.currentTimeMillis()
}
Example 15
Source File: JoinEmitter.scala — from the piglet project (Apache License 2.0)
package dbis.piglet.codegen.flink.emitter

import dbis.piglet.codegen.{ CodeEmitter, CodeGenContext, CodeGenException }
import dbis.piglet.expr.Ref
import dbis.piglet.op.Join

import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.Set
import dbis.piglet.codegen.scala_lang.ScalaEmitter
import scala.collection.mutable.ListBuffer
import dbis.piglet.codegen.flink.FlinkHelper

/**
 * Emits Flink code for an n-way JOIN: chains `.join(...).where(k1).equalTo(k2)`
 * for each consecutive pair of inputs and maps the nested tuple result into
 * the schema class of the output pipe.
 */
class JoinEmitter extends dbis.piglet.codegen.scala_lang.JoinEmitter {
  override def template: String = """    val <out> = <rel1><rels, rel1_keys, rel2_keys:{ r,k1, k2 | .join(<r>).where(<k1>).equalTo(<k2>)}>.map{ 
                                    |      t => 
                                    |        val <pairs> = t
                                    |        <class>(<fields>)
                                    |    }""".stripMargin

  /**
   * Renders the JOIN template for `op`.
   *
   * @throws CodeGenException if the operator has no schema (the result class
   *                          cannot be named without one).
   */
  override def code(ctx: CodeGenContext, op: Join): String = {
    if (op.schema.isEmpty)
      throw CodeGenException("schema required in JOIN")

    // Resolve each input's join-key expressions to positional field accessors.
    val res = op.inputs.zip(op.fieldExprs)
    val keys = res.map { case (i, k) => k.map { x => s"_${FlinkHelper.getOrderIndex(i.producer.schema, x)}" } }

    // Pair the key lists of consecutive inputs: (keys_i, keys_i+1).
    // (Replaces the original index loop into a mutable ListBuffer.)
    val keysGroup = keys.zip(keys.tail)
    val keysGroup1 = keysGroup.zipWithIndex.map {
      case (i, k) =>
        // After the first join the left side is nested one tuple level deeper,
        // so its accessors must be prefixed with the join position.
        if (k > 0)
          (FlinkHelper.printQuote(i._1.map { x => s"_$k.$x" }), FlinkHelper.printQuote(i._2))
        else
          (FlinkHelper.printQuote(i._1), FlinkHelper.printQuote(i._2))
    }
    val keys1 = keysGroup1.map(x => x._1)
    val keys2 = keysGroup1.map(x => x._2)

    val className = op.schema match {
      case Some(s) => ScalaEmitter.schemaClassName(s.className)
      case None => ScalaEmitter.schemaClassName(op.outPipeName)
    }

    // Build the nested deconstruction pattern ((v1,v2),v3)... without a var.
    val pairs = (3 to op.inputs.length).foldLeft("(v1,v2)")((acc, i) => s"($acc,v$i)")

    // One accessor per field of every input; inputs without a schema
    // contribute their single field _0.
    val fieldList = op.inputs.zipWithIndex.flatMap { case (input, idx) =>
      val i = idx + 1
      input.producer.schema match {
        case Some(s) => s.fields.indices.map(k => s"v$i._$k")
        case None => Seq(s"v$i._0")
      }
    }

    render(
      Map("out" -> op.outPipeName,
        "rel1" -> op.inputs.head.name,
        "class" -> className,
        "rels" -> op.inputs.tail.map(_.name),
        "pairs" -> pairs,
        "rel1_keys" -> keys1,
        "rel2_keys" -> keys2,
        "fields" -> fieldList.mkString(", ")))
  }
}

object JoinEmitter {
	// Shared singleton emitter; created lazily on first access.
	lazy val instance = new JoinEmitter
}