java.util.Map.Entry Scala Examples

The following examples show how to use java.util.Map.Entry from Scala. Each example is taken from an open-source project; the source file and project it comes from are noted above the code.
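
As a minimal starting point (a small illustrative snippet, not taken from any of the projects below), the following iterates a java.util.HashMap's entrySet and reads each Entry's key and value:

import java.util.HashMap
import java.util.Map.Entry

object EntryBasics extends App {
  val jmap = new HashMap[String, Int]()
  jmap.put("a", 1)
  jmap.put("b", 2)

  // entrySet() exposes the map contents as java.util.Map.Entry values
  val it = jmap.entrySet().iterator()
  while (it.hasNext) {
    val entry: Entry[String, Int] = it.next()
    println(s"${entry.getKey} -> ${entry.getValue}")
  }
}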
Example 1
Source File: JavaUtils.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.api.java

import java.{util => ju}
import java.util.Map.Entry

import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    if (option.isDefined) {
      Optional.of(option.get)
    } else {
      Optional.empty[T]
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case ex: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        def hasNext: Boolean = ui.hasNext

        def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
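
A possible usage sketch for the wrapper above. JavaUtils is private[spark], so the caller below is placed in a hypothetical sub-package of org.apache.spark; the point of the wrapper is that the returned java.util.Map also implements java.io.Serializable, which the standard Scala-to-Java wrapper did not (SPARK-3926):

package org.apache.spark.demo // hypothetical location; needed because JavaUtils is private[spark]

import org.apache.spark.api.java.JavaUtils

object SerializableWrapperDemo {
  def main(args: Array[String]): Unit = {
    val scalaMap = Map("a" -> 1, "b" -> 2)

    // A java.util.Map view of the Scala map that is also java.io.Serializable
    val javaMap: java.util.Map[String, Int] = JavaUtils.mapAsSerializableJavaMap(scalaMap)

    println(javaMap.get("a"))          // 1
    println(javaMap.size())            // 2
    println(javaMap.containsKey("zz")) // false (falls back to AbstractMap's entrySet scan here)
  }
}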
Example 2
Source File: SortedMapDeserializerModule.scala    From mango   with Apache License 2.0
package com.kakao.shaded.jackson.module.scala.deser

import java.util.AbstractMap
import java.util.Map.Entry

import scala.collection.{mutable, SortedMap}
import scala.collection.immutable.TreeMap

import com.kakao.shaded.jackson.core.JsonParser
import com.kakao.shaded.jackson.databind._
import com.kakao.shaded.jackson.databind.deser.std.{MapDeserializer, ContainerDeserializerBase}
import com.kakao.shaded.jackson.databind.jsontype.TypeDeserializer
import com.kakao.shaded.jackson.databind.`type`.MapLikeType
import com.kakao.shaded.jackson.module.scala.modifiers.MapTypeModifierModule
import deser.{ContextualDeserializer, Deserializers, ValueInstantiator}
import com.kakao.shaded.jackson.module.scala.introspect.OrderingLocator
import scala.language.existentials

private class SortedMapBuilderWrapper[K,V](val builder: mutable.Builder[(K,V), SortedMap[K,V]]) extends AbstractMap[K,V] {
  override def put(k: K, v: V) = { builder += ((k,v)); v }

  // Isn't used by the deserializer
  def entrySet(): java.util.Set[Entry[K, V]] = throw new UnsupportedOperationException
}

private object SortedMapDeserializer {
  def orderingFor = OrderingLocator.locate _

  def builderFor(cls: Class[_], keyCls: JavaType): mutable.Builder[(AnyRef,AnyRef), SortedMap[AnyRef,AnyRef]] =
    if (classOf[TreeMap[_,_]].isAssignableFrom(cls)) TreeMap.newBuilder[AnyRef,AnyRef](orderingFor(keyCls)) else
    SortedMap.newBuilder[AnyRef,AnyRef](orderingFor(keyCls))
}

private class SortedMapDeserializer(
    collectionType: MapLikeType,
    config: DeserializationConfig,
    keyDeser: KeyDeserializer,
    valueDeser: JsonDeserializer[_],
    valueTypeDeser: TypeDeserializer)
  extends ContainerDeserializerBase[SortedMap[_,_]](collectionType)
  with ContextualDeserializer {
  
  private val javaContainerType =
    config.getTypeFactory.constructMapLikeType(classOf[MapBuilderWrapper[_,_]], collectionType.getKeyType, collectionType.getContentType)

  private val instantiator =
    new ValueInstantiator {
      def getValueTypeDesc = collectionType.getRawClass.getCanonicalName
      override def canCreateUsingDefault = true
      override def createUsingDefault(ctx: DeserializationContext) =
        new SortedMapBuilderWrapper[AnyRef,AnyRef](SortedMapDeserializer.builderFor(collectionType.getRawClass, collectionType.getKeyType))
    }

  private val containerDeserializer =
    new MapDeserializer(javaContainerType,instantiator,keyDeser,valueDeser.asInstanceOf[JsonDeserializer[AnyRef]],valueTypeDeser)

  override def getContentType = containerDeserializer.getContentType

  override def getContentDeserializer = containerDeserializer.getContentDeserializer

  override def createContextual(ctxt: DeserializationContext, property: BeanProperty) =
    if (keyDeser != null && valueDeser != null) this
    else {
      val newKeyDeser = Option(keyDeser).getOrElse(ctxt.findKeyDeserializer(collectionType.getKeyType, property))
      val newValDeser = Option(valueDeser).getOrElse(ctxt.findContextualValueDeserializer(collectionType.getContentType, property))
      new SortedMapDeserializer(collectionType, config, newKeyDeser, newValDeser, valueTypeDeser)
    }
  
  override def deserialize(jp: JsonParser, ctxt: DeserializationContext): SortedMap[_,_] = {
    containerDeserializer.deserialize(jp,ctxt) match {
      case wrapper: SortedMapBuilderWrapper[_,_] => wrapper.builder.result()
    }
  }
}

private object SortedMapDeserializerResolver extends Deserializers.Base {
  
  private val SORTED_MAP = classOf[collection.SortedMap[_,_]]

  override def findMapLikeDeserializer(theType: MapLikeType,
                              config: DeserializationConfig,
                              beanDesc: BeanDescription,
                              keyDeserializer: KeyDeserializer,
                              elementTypeDeserializer: TypeDeserializer,
                              elementDeserializer: JsonDeserializer[_]): JsonDeserializer[_] =
    if (!SORTED_MAP.isAssignableFrom(theType.getRawClass)) null
    else new SortedMapDeserializer(theType,config,keyDeserializer,elementDeserializer,elementTypeDeserializer)
}


trait SortedMapDeserializerModule extends MapTypeModifierModule {
  this += (_ addDeserializers SortedMapDeserializerResolver)
} 
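
The wrapper trick in SortedMapBuilderWrapper is worth isolating: a Scala Builder is exposed through java.util.AbstractMap so that Java-side code (here Jackson's MapDeserializer) can feed it through put(), and the Scala side then collects the finished SortedMap from the builder. A standalone sketch of that idea, with illustrative names that are not part of the module above:

import java.util.AbstractMap
import java.util.Map.Entry

import scala.collection.SortedMap
import scala.collection.mutable

class BuilderBackedMap[K, V](val builder: mutable.Builder[(K, V), SortedMap[K, V]])
    extends AbstractMap[K, V] {
  // The Java-side producer only ever calls put(); each call is forwarded to the builder.
  override def put(k: K, v: V): V = { builder += ((k, v)); v }
  // Read access is never needed by the producer, so it can stay unsupported.
  def entrySet(): java.util.Set[Entry[K, V]] = throw new UnsupportedOperationException
}

object BuilderBackedMapDemo extends App {
  val wrapper = new BuilderBackedMap(SortedMap.newBuilder[String, Int])
  wrapper.put("b", 2)
  wrapper.put("a", 1)
  println(wrapper.builder.result()) // keys come out sorted: a -> 1, b -> 2
}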
Example 3
Source File: JavaUtils.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.api.java

import java.{util => ju}
import java.util.Map.Entry

import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    if (option.isDefined) {
      Optional.of(option.get)
    } else {
      Optional.empty[T]
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case ex: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        def hasNext: Boolean = ui.hasNext

        def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
Example 4
Source File: JavaUtils.scala    From multi-tenancy-spark   with Apache License 2.0
package org.apache.spark.api.java

import java.{util => ju}
import java.util.Map.Entry

import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    if (option.isDefined) {
      Optional.of(option.get)
    } else {
      Optional.empty[T]
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case ex: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        def hasNext: Boolean = ui.hasNext

        def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
Example 5
Source File: JavaUtils.scala    From iolap   with Apache License 2.0
package org.apache.spark.api.java

import java.util.Map.Entry

import com.google.common.base.Optional

import java.{util => ju}
import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    option match {
      case Some(value) => Optional.of(value)
      case None => Optional.absent()
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case ex: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        def hasNext: Boolean = ui.hasNext

        def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
Example 6
Source File: JavaUtils.scala    From spark1.52   with Apache License 2.0
package org.apache.spark.api.java

import java.util.Map.Entry

import com.google.common.base.Optional

import java.{util => ju}
import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    option match {
      case Some(value) => Optional.of(value)
      case None => Optional.absent()
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case ex: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        def hasNext: Boolean = ui.hasNext

        def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
Example 7
Source File: Pipe.scala    From hazelcast-scala   with Apache License 2.0
package com.hazelcast.Scala

import java.util.Map.Entry
import com.hazelcast.core.HazelcastInstance

private[Scala] sealed trait EntryFold[A, +V] extends Serializable {
  def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, V) => A): A
}

private[Scala] sealed trait Pipe[+V] extends Serializable {
  def prepare[A](hz: HazelcastInstance): EntryFold[A, V]
}

private[Scala] object PassThroughPipe extends Pipe[Any] {
  def apply[E]: Pipe[E] = this.asInstanceOf[Pipe[E]]
  private[this] val passThrough = new EntryFold[Any, Any] {
    def foldEntry(acc: Any, entry: Entry[_, _])(fold: (Any, Any) => Any): Any = fold(acc, entry)
  }
  def prepare[A](hz: HazelcastInstance) = passThrough.asInstanceOf[EntryFold[A, Any]]
}
private[Scala] class MapPipe[I, V](map: I => V, prev: Pipe[I]) extends Pipe[V] {
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, V] {
    private[this] val prevFold = prev.prepare[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, V) => A): A =
      prevFold.foldEntry(acc, entry) {
        case (acc, any) => fold(acc, map(any))
      }
  }
}
private[Scala] class MapTransformPipe[EK, EV, T](transform: Entry[EK, EV] => T, prev: Pipe[Entry[EK, EV]]) extends Pipe[Entry[EK, T]] {
  def this(prev: Pipe[Entry[EK, EV]], mvf: EV => T) = this((entry: Entry[EK, EV]) => mvf(entry.value), prev)
  type V = Entry[EK, T]
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, V] {
    private[this] val prevFold = prev.prepare[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, V) => A): A =
      prevFold.foldEntry(acc, entry) {
        case (acc, entry) =>
          val tEntry = new ImmutableEntry(entry.key, transform(entry))
          fold(acc, tEntry)
      }
  }
}
private[Scala] class FilterPipe[E](include: E => Boolean, prev: Pipe[E]) extends Pipe[E] {
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, E] {
    private[this] val prevFold = prev.prepare[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, E) => A): A =
      prevFold.foldEntry(acc, entry) {
        case (acc, any) if include(any) => fold(acc, any)
        case (acc, _) => acc
      }
  }
}
private[Scala] class FlatMapPipe[E, V](flatMap: E => Traversable[V], prev: Pipe[E]) extends Pipe[V] {
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, V] {
    private[this] val prevFold = prev.prepare[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, V) => A): A =
      prevFold.foldEntry(acc, entry) {
        case (acc, any) => flatMap(any).foldLeft(acc)(fold)
      }
  }
}
private[Scala] class CollectPipe[E, V](pf: PartialFunction[E, V], prev: Pipe[E]) extends Pipe[V] {
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, V] {
    private[this] val prevFold = prev.prepare[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, V) => A): A =
      prevFold.foldEntry(acc, entry) {
        case (acc, any) if pf.isDefinedAt(any) => fold(acc, pf(any))
        case (acc, _) => acc
      }
  }
}

private[Scala] class GroupByPipe[E, G, F](gf: E => G, mf: E => F, prev: Pipe[E]) extends Pipe[(G, F)] {
  type V = (G, F)
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, V] {
    private[this] val prevFold = prev.prepare[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, V) => A): A =
      prevFold.foldEntry(acc, entry) {
        case (acc, any) => fold(acc, gf(any) -> mf(any))
      }
  }
}

private[Scala] final class JoinPipe[E, JT](join: Join[E, _, _] { type T = JT }, prev: Pipe[E]) extends Pipe[JT] {
  def prepare[A](hz: HazelcastInstance) = new EntryFold[A, JT] {
    private[this] val prevFold = prev.prepare[A](hz)
    private[this] val hzJoin = join.init[A](hz)
    def foldEntry(acc: A, entry: Entry[_, _])(fold: (A, JT) => A): A = {
      prevFold.foldEntry(acc, entry) {
        case (acc, elem) => hzJoin(acc, elem, fold)
      }
    }
  }
} 
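
The pattern here is continuation-style fold composition: each Pipe stage's EntryFold wraps the previous stage's fold callback, so a chain of map/filter/flatMap steps runs as a single pass per entry with no intermediate collections. Below is a self-contained sketch of that composition idea, with illustrative names that are not part of the hazelcast-scala API:

trait FoldStage[+V] {
  def foldEntry[A](acc: A, raw: String)(fold: (A, V) => A): A
}

// The source stage just hands the raw value to the fold callback.
object SourceStage extends FoldStage[String] {
  def foldEntry[A](acc: A, raw: String)(fold: (A, String) => A): A = fold(acc, raw)
}

// A map stage wraps the previous fold and transforms the value before folding it in.
class MapStage[I, V](f: I => V, prev: FoldStage[I]) extends FoldStage[V] {
  def foldEntry[A](acc: A, raw: String)(fold: (A, V) => A): A =
    prev.foldEntry(acc, raw)((a, i) => fold(a, f(i)))
}

// A filter stage either folds the value in or returns the accumulator unchanged.
class FilterStage[V](p: V => Boolean, prev: FoldStage[V]) extends FoldStage[V] {
  def foldEntry[A](acc: A, raw: String)(fold: (A, V) => A): A =
    prev.foldEntry(acc, raw)((a, v) => if (p(v)) fold(a, v) else a)
}

object FoldPipelineDemo extends App {
  // Length of each raw entry, keeping only even lengths, summed in a single pass.
  val pipeline = new FilterStage[Int](_ % 2 == 0, new MapStage[String, Int](_.length, SourceStage))
  val total = List("ab", "abc", "abcd").foldLeft(0)((acc, s) => pipeline.foldEntry(acc, s)(_ + _))
  println(total) // 6 = 2 + 4 (odd-length "abc" is filtered out)
}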
Example 8
Source File: EntryMapDDS.scala    From hazelcast-scala   with Apache License 2.0
package com.hazelcast.Scala.dds

import com.hazelcast.Scala._
import java.util.Map.Entry
import com.hazelcast.query.Predicate
import com.hazelcast.query.TruePredicate
import scala.concurrent.ExecutionContext

class EntryMapDDS[K, V](dds: MapDDS[K, V, Entry[K, V]]) extends MapEntryEventSubscription[K, V] {
  def filterKeys(key: K, others: K*): DDS[Entry[K, V]] = filterKeys((key +: others).toSet)
  def filterKeys(f: K => Boolean): DDS[Entry[K, V]] = {
    f match {
      case set: collection.Set[K] =>
        val keySet = dds.keySet.map(_.intersect(set)).getOrElse(set.toSet)
        new MapDDS(dds.imap, dds.predicate, Some(keySet), dds.pipe)
      case filter => dds.pipe match {
        case None =>
          val predicate = new KeyPredicate[K](filter, dds.predicate.orNull.asInstanceOf[Predicate[Object, Object]])
          new MapDDS(dds.imap, Some(predicate), dds.keySet, dds.pipe)
        case Some(existingPipe) =>
          val keyFilter = (new KeyPredicate[K](filter).apply _).asInstanceOf[Entry[K, V] => Boolean]
          val pipe = new FilterPipe(keyFilter, existingPipe)
          new MapDDS(dds.imap, dds.predicate, dds.keySet, Some(pipe))
      }
    }
  }
  def filterValues(filter: V => Boolean): DDS[Entry[K, V]] = {
    dds.pipe match {
      case None =>
        val predicate = new ValuePredicate[V](filter, dds.predicate.orNull.asInstanceOf[Predicate[Object, Object]])
        new MapDDS(dds.imap, Some(predicate), dds.keySet, dds.pipe)
      case Some(existingPipe) =>
        val valueFilter = (new ValuePredicate[V](filter).apply _).asInstanceOf[Entry[K, V] => Boolean]
        val pipe = new FilterPipe(valueFilter, existingPipe)
        new MapDDS(dds.imap, dds.predicate, dds.keySet, Some(pipe))
    }
  }
  def mapValues[T](mvf: V => T): DDS[Entry[K, T]] = {
    val prevPipe = dds.pipe getOrElse PassThroughPipe[Entry[K, V]]
    val pipe = new MapTransformPipe(prevPipe, mvf)
    new MapDDS(dds.imap, dds.predicate, dds.keySet, Some(pipe))
  }
  def transform[T](tf: Entry[K, V] => T): DDS[Entry[K, T]] = {
    val prevPipe = dds.pipe getOrElse PassThroughPipe[Entry[K, V]]
    val pipe = new MapTransformPipe[K, V, T](tf, prevPipe)
    new MapDDS(dds.imap, dds.predicate, dds.keySet, Some(pipe))
  }

  type MSR = ListenerRegistration
  def onKeyEvents(localOnly: Boolean, runOn: ExecutionContext)(pf: PartialFunction[KeyEvent[K], Unit]): MSR =
    subscribeEntries(new KeyListener(pf, Option(runOn)), localOnly, includeValue = false)
  def onEntryEvents(localOnly: Boolean, runOn: ExecutionContext)(pf: PartialFunction[EntryEvent[K, V], Unit]): MSR =
    subscribeEntries(new EntryListener(pf, Option(runOn)), localOnly, includeValue = true)
  def onKeyEvents(cb: OnKeyEvent[K], localOnly: Boolean): MSR =
    subscribeEntries(cb, localOnly, includeValue = false)
  def onEntryEvents(cb: OnEntryEvent[K, V], localOnly: Boolean): MSR =
    subscribeEntries(cb, localOnly, includeValue = true)

  private def subscribeEntries(
    listener: com.hazelcast.map.listener.MapListener,
    localOnly: Boolean,
    includeValue: Boolean): ListenerRegistration = {

    val (singleKey, predicate) = dds.keySet match {
      case None =>
        None -> dds.predicate.getOrElse(TruePredicate.INSTANCE).asInstanceOf[Predicate[K, V]]
      case Some(keys) if keys.size <= 1 =>
        keys.headOption -> dds.predicate.getOrElse(TruePredicate.INSTANCE).asInstanceOf[Predicate[K, V]]
      case Some(keys) =>
        None -> (dds.predicate match {
          case None => new KeyPredicate(keys).asInstanceOf[Predicate[K, V]]
          case Some(predicate) => (new KeyPredicate(keys) && predicate).asInstanceOf[Predicate[K, V]]
        })
    }
    val regId = singleKey match {
      case Some(key) if localOnly => dds.imap.addLocalEntryListener(listener, predicate, key, includeValue)
      case None if localOnly => dds.imap.addLocalEntryListener(listener, predicate, includeValue)
      case Some(key) => dds.imap.addEntryListener(listener, predicate, key, includeValue)
      case None => dds.imap.addEntryListener(listener, predicate, includeValue)
    }
    new ListenerRegistration {
      def cancel(): Boolean = dds.imap.removeEntryListener(regId)
    }
  }

} 
Example 9
Source File: SingleEntryCallbackProcessor.scala    From hazelcast-scala   with Apache License 2.0
package com.hazelcast.Scala

import com.hazelcast.map.EntryProcessor
import java.util.Map.Entry
import scala.util.control.NonFatal
import com.hazelcast.map.EntryBackupProcessor

private[Scala] sealed abstract class SingleEntryCallbackProcessor[K, V, R] extends EntryProcessor[K, V] {
  final def process(entry: Entry[K, V]): Object =
    try {
      onEntry(entry).asInstanceOf[Object]
    } catch {
      case NonFatal(e) => e
    }
  def onEntry(entry: Entry[K, V]): R

  final def newCallback(nullReplacement: R = null.asInstanceOf[R]) = new FutureCallback[R, R](nullReplacement)
  final def newCallbackOpt = new FutureCallback[R, Option[R]](None)(Some(_))

}

private[Scala] abstract class SingleEntryCallbackReader[K, V, R] extends SingleEntryCallbackProcessor[K, V, R] {
  final def getBackupProcessor = null
  final def onEntry(entry: Entry[K, V]): R = onEntry(entry.key, entry.value)
  def onEntry(key: K, value: V): R
}
private[Scala] abstract class SingleEntryCallbackUpdater[K, V, R]
    extends SingleEntryCallbackProcessor[K, V, R]
    with EntryBackupProcessor[K, V] {
  final def getBackupProcessor = this
  def processBackup(entry: Entry[K, V]): Unit = onEntry(entry)
} 
Example 10
Source File: Stage.scala    From hazelcast-scala   with Apache License 2.0
package com.hazelcast.Scala.actress

import scala.concurrent.Future

import com.hazelcast.Scala._
import com.hazelcast.core.{ HazelcastInstance, IMap }
import java.util.Map.Entry
import com.hazelcast.core.HazelcastInstanceAware
import scala.beans.BeanProperty
import com.hazelcast.instance.HazelcastInstanceImpl
import com.hazelcast.instance.HazelcastInstanceProxy

class Stage(private val actressMap: IMap[String, Array[Byte]]) {

  def this(name: String, hz: HazelcastInstance) = this(hz.getMap(name))

  def actressOf[A <: AnyRef](name: String, create: => A): ActressRef[A] =
    new ActressImpl(name, actressMap, create)

}

private class ActressImpl[A <: AnyRef](
  val name: String,
  imap: IMap[String, Array[Byte]],
  create: => A)
    extends ActressRef[A] {

  def apply[R](thunk: (HazelcastInstance, A) => R): Future[R] = {
    val ep = new SingleEntryCallbackUpdater[String, Array[Byte], R] with HazelcastInstanceAware {
      val newActress = create _
      @BeanProperty @transient
      var hazelcastInstance: HazelcastInstance = _
      var newState: Array[Byte] = _
      def onEntry(entry: Entry[String, Array[Byte]]): R = {
        val serializationService = hazelcastInstance match {
          case hz: HazelcastInstanceImpl => hz.getSerializationService
          case hz: HazelcastInstanceProxy => hz.getSerializationService
        }
        val actress = entry.value match {
          case null => newActress()
          case bytes =>
            val inp = serializationService.createObjectDataInput(bytes)
            serializationService.readObject(inp)
        }
        val result = thunk(hazelcastInstance, actress)
        newState = {
          val out = serializationService.createObjectDataOutput()
          serializationService.writeObject(out, actress)
          out.toByteArray()
        }
        entry.value = newState
        result
      }
      override def processBackup(entry: Entry[String, Array[Byte]]): Unit = {
        entry.value = newState
      }
    }
    val callback = ep.newCallback()
    imap.submitToKey(name, ep, callback)
    callback.future
  }
} 
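
A possible usage sketch for Stage/ActressRef above, assuming hazelcast-scala on the classpath and a running Hazelcast member (object and map names below are illustrative): state is kept serialized in the IMap and each invocation runs atomically on the entry via the callback entry processor.

import java.util.concurrent.atomic.AtomicInteger
import scala.concurrent.Await
import scala.concurrent.duration._

import com.hazelcast.Scala.actress.Stage
import com.hazelcast.core.Hazelcast

object ActressDemo extends App {
  val hz = Hazelcast.newHazelcastInstance()
  val stage = new Stage("counters", hz)

  // The actress state must be serializable; AtomicInteger is.
  val visits = stage.actressOf("visits", create = new AtomicInteger(0))

  // Each call is applied to the entry atomically and returns a Future of the result.
  val count = visits { (_, state) => state.incrementAndGet() }
  println(Await.result(count, 5.seconds)) // 1 on the first call

  hz.shutdown()
}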
Example 11
Source File: lower-priority-implicits.scala    From hazelcast-scala   with Apache License 2.0
package com.hazelcast.Scala

import java.util.Map.Entry

import com.hazelcast.Scala.dds._
import com.hazelcast.core._
import com.hazelcast.durableexecutor.DurableExecutorService
import com.hazelcast.query._

import language.implicitConversions

trait LowPriorityImplicits {

  @inline implicit def builder2anypred(p: PredicateBuilder): Predicate[Any, Any] = p.asInstanceOf[Predicate[Any, Any]]
  @inline implicit def sql2anypred(p: SqlPredicate): Predicate[Any, Any] = p.asInstanceOf[Predicate[Any, Any]]

  @inline implicit def dds2aggrDds[E](dds: DDS[E]): AggrDDS[E] = dds match {
    case dds: MapDDS[_, _, E] => new AggrMapDDS(dds)
  }
  @inline implicit def sortdds2aggrDds[E](dds: SortDDS[E]): AggrDDS[E] = dds match {
    case dds: MapSortDDS[_, _, E] => new AggrMapDDS(dds.dds, Sorted(dds.ord, dds.skip, dds.limit))
  }
  @inline implicit def dds2AggrGrpDds[G, E](dds: GroupDDS[G, E]): AggrGroupDDS[G, E] = dds match {
    case dds: MapGroupDDS[_, _, G, E] => new AggrGroupMapDDS(dds.dds)
  }
}
trait MediumPriorityImplicits extends LowPriorityImplicits {
  @inline implicit def dds2ordDds[O: Ordering](dds: DDS[O]): OrderingDDS[O] = dds match {
    case dds: MapDDS[_, _, O] => new OrderingMapDDS(dds)
  }
  @inline implicit def sortdds2ordDds[O: Ordering](dds: SortDDS[O]): OrderingDDS[O] = dds match {
    case dds: MapSortDDS[_, _, O] => new OrderingMapDDS(dds.dds, Sorted(dds.ord, dds.skip, dds.limit))
  }
  @inline implicit def dds2OrdGrpDds[G, O: Ordering](dds: GroupDDS[G, O]): OrderingGroupDDS[G, O] = dds match {
    case dds: MapGroupDDS[_, _, G, O] => new OrderingGroupMapDDS(dds.dds)
  }
}
trait HighPriorityImplicits extends MediumPriorityImplicits {
  @inline implicit def imap2dds[K, V](imap: IMap[K, V]): DDS[Entry[K, V]] = new MapDDS(imap)
  @inline implicit def imap2aggrDds[K, V](imap: IMap[K, V]): AggrDDS[Entry[K, V]] = dds2aggrDds(new MapDDS(imap))
  @inline implicit def inst2scala(inst: HazelcastInstance) = new HzHazelcastInstance(inst)
  @inline implicit def cluster2scala(cl: Cluster) = new HzCluster(cl)
  @inline implicit def clientsvc2scala(cs: ClientService) = new HzClientService(cs)
  @inline implicit def partsvc2scala(ps: PartitionService) = new HzPartitionService(ps)
  @inline implicit def topic2scala[T](topic: ITopic[T]) = new HzTopic(topic)
  @inline implicit def queue2scala[T](queue: BaseQueue[T]) = new HzQueue(queue)
  @inline implicit def txqueue2scala[T](queue: TransactionalQueue[T]) = new HzTxQueue(queue)
  @inline implicit def exec2scala(exec: IExecutorService) = new HzExecutorService(exec)
  @inline implicit def durexec2scala(exec: DurableExecutorService) = new HzDurableExecutorService(exec)
  @inline implicit def dds2numDds[N: Numeric](dds: DDS[N]): NumericDDS[N] = dds match {
    case dds: MapDDS[_, _, N] => new NumericMapDDS(dds)
  }
  @inline implicit def sortdds2numDds[N: Numeric](dds: SortDDS[N]): NumericDDS[N] = dds match {
    case dds: MapSortDDS[_, _, N] => new NumericMapDDS(dds.dds, Sorted(dds.ord, dds.skip, dds.limit))
  }
  @inline implicit def dds2NumGrpDds[G, N: Numeric](dds: GroupDDS[G, N]): NumericGroupDDS[G, N] = dds match {
    case grpDDS: MapGroupDDS[_, _, G, N] => new NumericGroupMapDDS(grpDDS.dds)
  }
  @inline implicit def dds2entryDds[K, V](dds: DDS[Entry[K, V]]): EntryMapDDS[K, V] = dds match {
    case dds: MapDDS[K, V, Entry[K, V]] @unchecked => new EntryMapDDS(dds)
  }
  @inline implicit def imap2entryDds[K, V](imap: IMap[K, V]): EntryMapDDS[K, V] = new EntryMapDDS(new MapDDS(imap))
} 
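
A hedged sketch of what these conversions buy (assuming hazelcast-scala on the classpath and a running member; the demo object is illustrative): with the implicits in scope, imap2entryDds converts an IMap into the EntryMapDDS from Example 8, so its filterValues/mapValues operations become available directly on the map.

import com.hazelcast.Scala.HighPriorityImplicits
import com.hazelcast.core.Hazelcast

object ImplicitsDemo extends App with HighPriorityImplicits {
  val hz = Hazelcast.newHazelcastInstance()
  val ages = hz.getMap[String, Int]("ages")
  ages.put("alice", 34)
  ages.put("bob", 17)

  // imap2entryDds (inherited above) converts the IMap to an EntryMapDDS,
  // giving the filterValues/mapValues operations shown in Example 8:
  val adults  = ages.filterValues(_ >= 18)
  val doubled = ages.mapValues(_ * 2)

  hz.shutdown()
}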
Example 12
Source File: JavaUtils.scala    From Spark-2.3.1   with Apache License 2.0
package org.apache.spark.api.java

import java.{util => ju}
import java.util.Map.Entry

import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    if (option.isDefined) {
      Optional.of(option.get)
    } else {
      Optional.empty[T]
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    // Delegate to implementation because AbstractMap implementation iterates over whole key set
    override def containsKey(key: AnyRef): Boolean = try {
      underlying.contains(key.asInstanceOf[A])
    } catch {
      case _: ClassCastException => false
    }

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case _: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        override def hasNext: Boolean = ui.hasNext

        override def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        override def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
Example 13
Source File: JavaUtils.scala    From BigDatalog   with Apache License 2.0
package org.apache.spark.api.java

import java.util.Map.Entry

import com.google.common.base.Optional

import java.{util => ju}
import scala.collection.mutable

private[spark] object JavaUtils {
  def optionToOptional[T](option: Option[T]): Optional[T] =
    option match {
      case Some(value) => Optional.of(value)
      case None => Optional.absent()
    }

  // Workaround for SPARK-3926 / SI-8911
  def mapAsSerializableJavaMap[A, B](underlying: collection.Map[A, B]): SerializableMapWrapper[A, B]
    = new SerializableMapWrapper(underlying)

  // Implementation is copied from scala.collection.convert.Wrappers.MapWrapper,
  // but implements java.io.Serializable. It can't just be subclassed to make it
  // Serializable since the MapWrapper class has no no-arg constructor. This class
  // doesn't need a no-arg constructor though.
  class SerializableMapWrapper[A, B](underlying: collection.Map[A, B])
    extends ju.AbstractMap[A, B] with java.io.Serializable { self =>

    override def size: Int = underlying.size

    override def get(key: AnyRef): B = try {
      underlying.getOrElse(key.asInstanceOf[A], null.asInstanceOf[B])
    } catch {
      case ex: ClassCastException => null.asInstanceOf[B]
    }

    override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
      override def size: Int = self.size

      override def iterator: ju.Iterator[ju.Map.Entry[A, B]] = new ju.Iterator[ju.Map.Entry[A, B]] {
        val ui = underlying.iterator
        var prev : Option[A] = None

        def hasNext: Boolean = ui.hasNext

        def next(): Entry[A, B] = {
          val (k, v) = ui.next()
          prev = Some(k)
          new ju.Map.Entry[A, B] {
            import scala.util.hashing.byteswap32
            override def getKey: A = k
            override def getValue: B = v
            override def setValue(v1 : B): B = self.put(k, v1)
            override def hashCode: Int = byteswap32(k.hashCode) + (byteswap32(v.hashCode) << 16)
            override def equals(other: Any): Boolean = other match {
              case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
              case _ => false
            }
          }
        }

        def remove() {
          prev match {
            case Some(k) =>
              underlying match {
                case mm: mutable.Map[A, _] =>
                  mm.remove(k)
                  prev = None
                case _ =>
                  throw new UnsupportedOperationException("remove")
              }
            case _ =>
              throw new IllegalStateException("next must be called at least once before remove")
          }
        }
      }
    }
  }
} 
Example 14
Source File: Memoize.scala    From scalismo   with Apache License 2.0
package scalismo.utils

import java.util.Map.Entry

class Memoize[-T, +R](f: T => R, cacheSizeHint: Int) extends (T => R) {

  private class Holder[X] {
    private var data: Option[X] = None

    def getOrPut(f: => X): X = {
      data match {
        case Some(v) => v
        case None =>
          this.synchronized {
            data match {
              case Some(v) => v
              case None =>
                data = Some(f)
                data.get
            }
          }
      }
    }
  }

  private[this] val cache = new java.util.LinkedHashMap[T, Holder[R]](64, 0.75f, false) {
    override def removeEldestEntry(eldest: Entry[T, Holder[R]]) = size() > cacheSizeHint
  }

  override def apply(x: T) = {
    val holder: Holder[R] = {
      cache.get(x) match {
        case h: Holder[R] => h
        case null =>
          cache.synchronized {
            cache.get(x) match {
              case h: Holder[R] => h
              case null =>
                val h = new Holder[R]
                cache.put(x, h)
                h
            }
          }
      }
    }
    holder.getOrPut(f(x))
  }

}

object Memoize {

  def apply[T, R](f: T => R, cacheSizeHint: Int) = new Memoize[T, R](f, cacheSizeHint)

  def memfun2[T, R, F](f: F, cacheSizeHint: Int)(implicit e: Tupler[F, T => R]): F =
    e.untupled(new Memoize(e.tupled(f), cacheSizeHint))

}

sealed class Tupler[U, T](val tupled: U => T, val untupled: T => U)

object Tupler {

  implicit def function0[R]: Tupler[() => R, Unit => R] =
    new Tupler((f: () => R) => (_: Unit) => f(), (f: Unit => R) => () => f(()))

  implicit def function1[T, R]: Tupler[T => R, T => R] = new Tupler(identity, identity)

  implicit def function2[T1, T2, R]: Tupler[(T1, T2) => R, ((T1, T2)) => R] =
    new Tupler(_.tupled, Function.untupled[T1, T2, R])

}
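
A brief usage sketch for Memoize above (demo names are illustrative): wrap an expensive function and bound the cache via cacheSizeHint; repeated arguments are then served from the LinkedHashMap-backed cache, where removeEldestEntry evicts the oldest entries once the hint is exceeded.

import scalismo.utils.Memoize

object MemoizeDemo extends App {
  val slowSquare: Int => Int = { n => Thread.sleep(100); n * n }

  // Cache at most ~1000 distinct arguments (oldest entries are evicted beyond that).
  val memoSquare = Memoize(slowSquare, cacheSizeHint = 1000)

  println(memoSquare(12)) // computed (slow), then cached
  println(memoSquare(12)) // served from the cache (fast)
}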