org.apache.commons.lang3.SerializationUtils Scala Examples

The following examples show how to use org.apache.commons.lang3.SerializationUtils from Scala. Each example is taken from the open-source project named in its header.
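
Before the project examples, here is a minimal, self-contained sketch (names are illustrative) of the three methods they rely on: clone deep-copies a Serializable object through an in-memory round trip, while serialize and deserialize expose the two halves of that round trip directly.

import org.apache.commons.lang3.SerializationUtils

object SerializationUtilsSketch extends App {
  // Scala case classes are Serializable by default.
  case class Point(x: Int, y: Int)

  val p = Point(1, 2)

  // clone: a structurally equal but independent copy.
  val copy: Point = SerializationUtils.clone(p)
  assert(copy == p && !(copy eq p))

  // serialize/deserialize: the same round trip, split in two.
  val bytes: Array[Byte] = SerializationUtils.serialize(p)
  val restored: Point = SerializationUtils.deserialize(bytes)
  assert(restored == p)
}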
Example 1
Source File: ResetSystemProperties.scala, from drizzle-spark, Apache License 2.0
package org.apache.spark.util

import java.util.Properties

import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{BeforeAndAfterEach, Suite}


private[spark] trait ResetSystemProperties extends BeforeAndAfterEach { this: Suite =>
  var oldProperties: Properties = null

  override def beforeEach(): Unit = {
    // We need SerializationUtils.clone rather than `new Properties(System.getProperties())`
    // because the latter does not copy the entries: it creates a new Properties object
    // with the given properties as defaults, and defaults are not visible through the
    // standard Scala wrapper over Java Properties.
    oldProperties = SerializationUtils.clone(System.getProperties)
    super.beforeEach()
  }

  override def afterEach(): Unit = {
    try {
      super.afterEach()
    } finally {
      System.setProperties(oldProperties)
      oldProperties = null
    }
  }
} 
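
A hedged usage sketch for the trait above: mixed into a suite (which must live under an org.apache.spark package, since the trait is private[spark]), any system property a test mutates is rolled back after each test. The suite and property names below are illustrative.

package org.apache.spark.util

import org.scalatest.FunSuite

class PropertyRollbackSuite extends FunSuite with ResetSystemProperties {
  test("a mutated system property is restored after the test") {
    System.setProperty("spark.test.flag", "on") // illustrative property name
    assert(System.getProperty("spark.test.flag") === "on")
  }
  // afterEach() re-installs the snapshot taken in beforeEach(), so
  // "spark.test.flag" is gone again before the next test runs.
}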
Example 2
Source File: AbstractCriterion.scala, from BigDL, Apache License 2.0
package com.intel.analytics.bigdl.nn.abstractnn

import com.intel.analytics.bigdl.nn.abstractnn.SizeAverageStatus.SizeAverageStatus
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.{T, Table}
import org.apache.commons.lang3.SerializationUtils

import scala.reflect.ClassTag


// The enclosing class declaration is elided on the original page; this header is
// reconstructed from BigDL's source for context (state other than `output` elided).
abstract class AbstractCriterion[A <: Activity: ClassTag, B <: Activity: ClassTag,
  T: ClassTag](implicit ev: TensorNumeric[T]) extends Serializable {

  var output: T = ev.fromType[Int](0)

  // Deep-copy this criterion, state included, via Java serialization.
  def cloneCriterion(): AbstractCriterion[A, B, T] = {
    SerializationUtils.clone(this)
  }


  def canEqual(other: Any): Boolean = other.isInstanceOf[AbstractCriterion[A, B, T]]

  override def equals(other: Any): Boolean = other match {
    case that: AbstractCriterion[A, B, T] =>
      (that canEqual this) &&
        (that.getClass equals this.getClass) &&
        output == that.output
    case _ => false
  }

  override def hashCode(): Int = {
    def getHashCode(a: Any): Int = if (a == null) 0 else a.hashCode()
    val state = Seq(output)
    state.map(getHashCode).foldLeft(0)((a, b) => 31 * a + b)
  }
}

object SizeAverageStatus extends Enumeration {
  type SizeAverageStatus = Value
  val True, False, None = Value
} 
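
A short usage sketch for cloneCriterion, assuming BigDL's stock MSECriterion: the clone is a fully independent object graph, so driving the original leaves the copy's state untouched.

import com.intel.analytics.bigdl.nn.MSECriterion
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.tensor.Tensor

object CloneCriterionSketch extends App {
  val criterion = MSECriterion[Float]()
  val copy = criterion.cloneCriterion()
  assert(!(copy eq criterion)) // distinct, fully independent object graphs

  val input = Tensor[Float](2, 2).rand()
  val target = Tensor[Float](2, 2).rand()
  criterion.forward(input, target) // only the original accumulates output state
}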
Example 3
Source File: DnnTensorSpec.scala, from BigDL, Apache License 2.0
package com.intel.analytics.bigdl.tensor

import com.intel.analytics.bigdl.mkl.MklDnn
import com.intel.analytics.bigdl.nn.mkldnn.MemoryOwner
import com.intel.analytics.bigdl.utils.{BigDLSpecHelper, T}
import org.apache.commons.lang3.SerializationUtils

class DnnTensorSpec extends BigDLSpecHelper {
  implicit object Owner extends MemoryOwner {}
  "nElement" should "be correct" in {
    val tensor = DnnTensor[Float](3, 4, 5)
    tensor.nElement() should be(3 * 4 * 5)
  }

  "DnnTensor" should "does not support double" in {
    intercept[UnsupportedOperationException] {
      val t = DnnTensor[Double](3, 4, 5)
    }
  }

  "Copy" should "be correct" in {
    val heapTensor = Tensor[Float](T(1, 2, 3, 4))
    val dnnTensor1 = DnnTensor[Float](4)
    dnnTensor1.copy(heapTensor)
    val dnnTensor2 = DnnTensor[Float](4)
    dnnTensor2.copy(dnnTensor1)
    val heapTensor2 = Tensor[Float](4)
    heapTensor2.copy(dnnTensor2)
    heapTensor2 should be(heapTensor)
  }

  "release" should "be correct" in {
    val tensor = DnnTensor[Float](3, 4, 5)
    tensor.isReleased() should be(false)
    tensor.release()
    tensor.isReleased() should be(true)
  }

  "resize" should "be correct" in {
    val tensor = DnnTensor[Float](3, 4)
    tensor.size() should be(Array(3, 4))
    tensor.resize(Array(2, 3))
    tensor.size() should be(Array(2, 3))
    tensor.resize(2)
    tensor.size(1) should be(2)
    tensor.resize(Array(5, 6, 7))
    tensor.size() should be(Array(5, 6, 7))
    tensor.size(2) should be(6)
  }

  "add" should "be correct" in {
    val heapTensor1 = Tensor[Float](T(1, 2, 3, 4))
    val heapTensor2 = Tensor[Float](T(2, 5, 1, 7))
    val dnnTensor1 = DnnTensor[Float](4).copy(heapTensor1)
    val dnnTensor2 = DnnTensor[Float](4).copy(heapTensor2)
    dnnTensor1.add(dnnTensor2)
    val heapTensor3 = Tensor[Float](4).copy(dnnTensor1)
    heapTensor3 should be(Tensor[Float](T(3, 7, 4, 11)))
  }

  "tensor clone with java serialization" should "work correctly" in {
    val heapTensor = Tensor[Float](T(1, 2, 3, 4)).rand(-1, 1)
    val dnnTensor = DnnTensor[Float](4).copy(heapTensor)

    val cloned = SerializationUtils.clone(dnnTensor).asInstanceOf[DnnTensor[Float]]
    val heapCloned = Tensor[Float](4).copy(cloned)

    println(heapTensor)
    println("=" * 80)
    println(heapCloned)

    heapCloned should be (heapTensor)
  }
} 
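
The last test only works because DnnTensor's off-heap storage survives Java serialization, which default field-walking cannot provide. A minimal sketch of the general pattern (the class below is hypothetical, not BigDL's actual implementation): stage the native contents through a heap value in writeObject and re-materialize them in readObject.

import java.io.{ObjectInputStream, ObjectOutputStream}
import org.apache.commons.lang3.SerializationUtils

// Hypothetical stand-in for storage that default Java serialization cannot walk.
class NativeFloats(values: Array[Float]) extends Serializable {
  @transient private var nativeData: Array[Float] = values

  def toArray: Array[Float] = nativeData.clone()

  private def writeObject(out: ObjectOutputStream): Unit = {
    out.defaultWriteObject()
    out.writeObject(nativeData.clone()) // stage the native contents on the heap
  }

  private def readObject(in: ObjectInputStream): Unit = {
    in.defaultReadObject()
    nativeData = in.readObject().asInstanceOf[Array[Float]] // re-materialize
  }
}

object NativeFloatsSketch extends App {
  val copied = SerializationUtils.clone(new NativeFloats(Array(1f, 2f, 3f)))
  assert(copied.toArray.sameElements(Array(1f, 2f, 3f)))
}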
Example 4
Source File: ReLUSpec.scala, from BigDL, Apache License 2.0
package com.intel.analytics.bigdl.nn.mkldnn

import com.intel.analytics.bigdl.mkl.Memory
import com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.mkldnn.Phase.TrainingPhase
import com.intel.analytics.bigdl.numeric.NumericFloat
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.T
import org.apache.commons.lang3.SerializationUtils
import org.scalatest.{FlatSpec, Matchers}

class ReLUSpec extends FlatSpec with Matchers {
  "a simple relu" should "be correct" in {
    val layer = ReLU(0.0f)
    val input = Tensor[Float](T(
      T(1.0, 2.0),
      T(-1.0, -2.0)
    ))
    val seq = Sequential()
    seq.add(ReorderMemory(HeapData(Array(2, 2), Memory.Format.nc),
      HeapData(Array(2, 2), Memory.Format.nc)))
    seq.add(layer)
    seq.add(ReorderMemory(HeapData(Array(2, 2), Memory.Format.nc),
      HeapData(Array(2, 2), Memory.Format.nc)))
    seq.compile(Phase.TrainingPhase, Array(HeapData(Array(2, 2), Memory.Format.nc)))
    seq.forward(input) should be(Tensor[Float](T(
      T(1.0, 2.0),
      T(0.0, 0.0)
    )))
    val grad = Tensor[Float](T(
      T(-1.0, -2.0),
      T(1.0, 2.0)
    ))
    seq.backward(input, grad) should be(Tensor[Float](T(
      T(-1.0, -2.0),
      T(0.0, 0.0)
    )))
  }

  "Relu dnn should be same with bigdl relu" should "work correctly" in {
    val input = Tensor(4, 96, 55, 55).rand(-1, 1)
    val gradOutput = Tensor(4, 96, 55, 55).rand(-1, 1)

    val relu = nn.ReLU(ip = false)
    val reludnn = ReLU()
    val defaultFormat = HeapData(input.size(), Memory.Format.nchw)
    reludnn.setRuntime(new MklDnnRuntime)
    reludnn.initFwdPrimitives(Array(defaultFormat), TrainingPhase)
    reludnn.initBwdPrimitives(Array(defaultFormat), TrainingPhase)

    val output = relu.forward(input)
    val gradInput = relu.backward(input, gradOutput)

    val outputdnn = reludnn.forward(input)
    val gradInputdnn = reludnn.backward(input, gradOutput)

    Equivalent.nearequals(output, Tools.dense(outputdnn).toTensor) should be(true)
    Equivalent.nearequals(gradInput, Tools.dense(gradInputdnn).toTensor) should be(true)
  }

  "relu with java serialization" should "work correctly" in {
    val shape = Array(4, 96, 55, 55)
    val input = Tensor(shape).rand(-1, 1)
    val gradOutput = Tensor(shape).rand(-1, 1)

    val relu = ReLU()
    relu.setRuntime(new MklDnnRuntime)
    relu.initFwdPrimitives(Array(HeapData(shape, Memory.Format.nchw)), TrainingPhase)
    relu.initBwdPrimitives(Array(HeapData(shape, Memory.Format.nchw)), TrainingPhase)

    val cloned = SerializationUtils.clone(relu)
    cloned.setRuntime(new MklDnnRuntime)
    cloned.initFwdPrimitives(Array(HeapData(shape, Memory.Format.nchw)), TrainingPhase)
    cloned.initBwdPrimitives(Array(HeapData(shape, Memory.Format.nchw)), TrainingPhase)

    relu.forward(input)
    cloned.forward(input)

    Tools.dense(relu.output) should be (Tools.dense(cloned.output))

    relu.backward(input, gradOutput)
    cloned.backward(input, gradOutput)

    Tools.dense(relu.gradInput) should be (Tools.dense(cloned.gradInput))
  }
} 
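
Note what the serialization test above does after cloning: it calls setRuntime and re-initializes the primitives on the clone. That is the usual pattern when a Serializable object keeps non-serializable native state in @transient fields, since a clone comes back with those fields empty. A minimal sketch with illustrative names:

import org.apache.commons.lang3.SerializationUtils

// Illustrative class, not BigDL's: the runtime handle is @transient, so a
// serialized clone comes back without it and must be re-initialized.
class NativeBackedLayer extends Serializable {
  @transient private var runtime: AnyRef = _

  def setRuntime(r: AnyRef): Unit = { runtime = r }
  def hasRuntime: Boolean = runtime != null
}

object TransientCloneSketch extends App {
  val layer = new NativeBackedLayer
  layer.setRuntime(new Object)

  val cloned = SerializationUtils.clone(layer)
  assert(layer.hasRuntime && !cloned.hasRuntime) // the clone must call setRuntime again
}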
Example 5
Source File: BinaryRedisPersistence.scala, from spark-redis, BSD 3-Clause "New" or "Revised" License
package org.apache.spark.sql.redis

import java.nio.charset.StandardCharsets.UTF_8

import org.apache.commons.lang3.SerializationUtils
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.StructType
import redis.clients.jedis.Pipeline


class BinaryRedisPersistence extends RedisPersistence[Array[Byte]] {

  override def save(pipeline: Pipeline, key: String, value: Array[Byte], ttl: Int): Unit = {
    val keyBytes = key.getBytes(UTF_8)
    if (ttl > 0) {
      pipeline.setex(keyBytes, ttl, value)
    } else {
      pipeline.set(keyBytes, value)
    }
  }

  override def load(pipeline: Pipeline, key: String, requiredColumns: Seq[String]): Unit =
    pipeline.get(key.getBytes(UTF_8))

  override def encodeRow(keyName: String, value: Row): Array[Byte] = {
    val fields = value.schema.fields.map(_.name)
    val valuesArray = fields.map(f => value.getAs[Any](f))
    SerializationUtils.serialize(valuesArray)
  }

  override def decodeRow(keyMap: (String, String), value: Array[Byte], schema: StructType,
                         requiredColumns: Seq[String]): Row = {
    val valuesArray: Array[Any] = SerializationUtils.deserialize(value)
    new GenericRowWithSchema(valuesArray, schema)
  }
} 
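
A standalone sketch of the round trip encodeRow and decodeRow perform, runnable with spark-sql on the classpath: flatten the row's values in schema order, serialize them, and rebuild a GenericRowWithSchema from the deserialized array.

import org.apache.commons.lang3.SerializationUtils
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.GenericRowWithSchema
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}

object RowRoundTripSketch extends App {
  val schema = StructType(Seq(
    StructField("name", StringType),
    StructField("age", IntegerType)))

  val row: Row = new GenericRowWithSchema(Array("alice", 42), schema)

  // encodeRow: values in schema order -> bytes
  val bytes = SerializationUtils.serialize(
    row.schema.fields.map(f => row.getAs[Any](f.name)))

  // decodeRow: bytes -> values -> Row carrying the original schema
  val decoded: Row = new GenericRowWithSchema(
    SerializationUtils.deserialize[Array[Any]](bytes), schema)

  assert(decoded == row)
}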
Examples 6 - 12
Source Files: ResetSystemProperties.scala, from sparkoscope, SparkCore, multi-tenancy-spark, iolap, spark1.52, Spark-2.3.1, and BigDatalog (all Apache License 2.0)

These seven examples repeat the ResetSystemProperties trait from Example 1 verbatim (a few copies also carry an unused import of org.apache.spark.SparkFunSuite), so the code is not reproduced again here.
Example 13
Source File: UTF8StringSchema.scala, from piglet, Apache License 2.0
package dbis.piglet.backends.flink.streaming

import org.apache.commons.lang3.SerializationUtils
import org.apache.flink.streaming.util.serialization._
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.java.typeutils.TypeExtractor

class UTF8StringSchema extends DeserializationSchema[String] with SerializationSchema[String] {

  override def deserialize(message: Array[Byte]): String = {
    new String(message, "UTF-8")
  }

  override def isEndOfStream(nextElement: String): Boolean = {
    false
  }

  override def serialize(element: String): Array[Byte] = {
    element.getBytes("UTF-8")
  }

  override def getProducedType(): TypeInformation[String] = {
    TypeExtractor.getForClass(classOf[String])
  }
}
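
Note that SerializationUtils is imported above but never used; the schema round-trips plain UTF-8 text. For payloads that are whole Java objects rather than strings, a hedged sketch of the analogous pairing (all names below are illustrative):

import org.apache.commons.lang3.SerializationUtils

object ObjectCodec {
  def encode[T <: java.io.Serializable](value: T): Array[Byte] =
    SerializationUtils.serialize(value)

  def decode[T <: java.io.Serializable](bytes: Array[Byte]): T =
    SerializationUtils.deserialize(bytes)
}

object ObjectCodecSketch extends App {
  final case class Event(id: Long, payload: String)

  val bytes = ObjectCodec.encode(Event(1L, "hello"))
  val back: Event = ObjectCodec.decode(bytes)
  assert(back == Event(1L, "hello"))
}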