breeze.numerics.sqrt Scala Examples

The following examples show how to use breeze.numerics.sqrt. Each example is taken from an open-source project; the source file, project, and license are noted above each snippet.
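As a quick orientation before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below; the object name is made up for illustration) showing that breeze.numerics.sqrt works on plain numbers as well as element-wise on Breeze vectors and matrices:

import breeze.linalg.{DenseMatrix, DenseVector}
import breeze.numerics.sqrt

object SqrtQuickStart {
  def main(args: Array[String]): Unit = {
    // scalar usage behaves like math.sqrt
    println(sqrt(2.0))                                  // 1.4142135623730951

    // element-wise usage on Breeze containers
    println(sqrt(DenseVector(1.0, 4.0, 9.0)))           // DenseVector(1.0, 2.0, 3.0)
    println(sqrt(DenseMatrix((1.0, 4.0), (9.0, 16.0)))) // 2 x 2 matrix of element-wise square roots
  }
}
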
Example 1
Source File: RandomAlterAspect.scala    From BigDL    with Apache License 2.0
package com.intel.analytics.bigdl.transform.vision.image.augmentation

import breeze.numerics.sqrt
import org.opencv.core.{Mat, Size}
import org.opencv.imgproc.Imgproc
import com.intel.analytics.bigdl.opencv.OpenCV
import com.intel.analytics.bigdl.transform.vision.image.{FeatureTransformer, ImageFeature}

object RandomAlterAspect {
  def apply(min_area_ratio: Float = 0.08f,
            max_area_ratio: Int = 1,
            min_aspect_ratio_change: Float = 0.75f,
            interp_mode: String = "CUBIC",
            cropLength: Int = 224): RandomAlterAspect = {
    OpenCV.isOpenCVLoaded
    new RandomAlterAspect(min_area_ratio, max_area_ratio,
      min_aspect_ratio_change, interp_mode, cropLength)
  }
}


class RandomAlterAspect(min_area_ratio: Float = 0.08f,
                        max_area_ratio: Int = 1,
                        min_aspect_ratio_change: Float = 0.75f,
                        interp_mode: String = "CUBIC",
                        cropLength: Int = 224)
  extends FeatureTransformer {

  import com.intel.analytics.bigdl.utils.RandomGenerator.RNG

  @inline
  private def randRatio(min: Float, max: Float): Float = {
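    // approximately uniform random ratio in (min, max], drawn at 1/1000 resolution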
    val res = (RNG.uniform(1e-2, (max - min) * 1000 + 1) + min * 1000) / 1000
    res.toFloat
  }

  override protected def transformMat(feature: ImageFeature): Unit = {
    val h = feature.opencvMat().size().height
    val w = feature.opencvMat().size().width
    val area = h * w

    require(min_area_ratio <= max_area_ratio, "min_area_ratio should be <= max_area_ratio")

    // Try up to 10 times to sample a crop whose area and aspect ratio fall within the configured
    // ranges; if every attempt fails, fall back to a plain resize to cropLength x cropLength.
    var attempt = 0
    var cropped = false
    while (attempt < 10 && !cropped) {
      val area_ratio = randRatio(min_area_ratio, max_area_ratio)
      val aspect_ratio_change = randRatio(min_aspect_ratio_change, 1 / min_aspect_ratio_change)
      val new_area = area_ratio * area
      var new_h = (sqrt(new_area) * aspect_ratio_change).toInt
      var new_w = (sqrt(new_area) / aspect_ratio_change).toInt
      if (randRatio(0, 1) < 0.5) {
        val tmp = new_h
        new_h = new_w
        new_w = tmp
      }
      if (new_h <= h && new_w <= w) {
        val y = RNG.uniform(1e-2, h - new_h + 1).toInt
        val x = RNG.uniform(1e-2, w - new_w + 1).toInt
        Crop.transform(feature.opencvMat(),
          feature.opencvMat(), x, y, x + new_w, y + new_h, false, false)
        Imgproc.resize(feature.opencvMat(), feature.opencvMat(),
          new Size(cropLength, cropLength), 0, 0, Imgproc.INTER_CUBIC)
        cropped = true
      }
      attempt += 1
    }
    if (!cropped) {
      Imgproc.resize(feature.opencvMat(),
        feature.opencvMat(), new Size(cropLength, cropLength), 0, 0, Imgproc.INTER_CUBIC)
    }
  }

  private def resizeImagePerShorterSize(img: Mat, shorter_size: Int) : (Int, Int) = {
    val h = img.size().height
    val w = img.size().width
    var new_h = shorter_size
    var new_w = shorter_size

    if (h < w) {
      new_w = (w / h * shorter_size).toInt
    } else {
      new_h = (h / w * shorter_size).toInt
    }
    (new_h, new_w)
  }
} 
Example 2
Source File: RelationWithItemToItem.scala    From AI    with Apache License 2.0
package com.bigchange.mllib

import breeze.numerics.{sqrt, pow}
import org.apache.spark.{HashPartitioner, SparkConf, SparkContext}



object RelationWithItemToItem {

  def main(args: Array[String]) {

    val sc = new SparkContext(new SparkConf()
      .setAppName("Item to Item")
      .setMaster("local"))
    // number of top related items to keep for each item
    val topK = 2

    val userItem = sc.textFile("/rating.dat")
      .map(_.split("\t")).map(x =>(x(0),x(1),x(2))).distinct().cache()
    // compute item -> (user, rating) and item -> sqrt(sum of squared ratings)
    val itemUser = userItem.map(x => (x._2,(x._1,x._3.toDouble))).partitionBy(new HashPartitioner(20))
    // sqrt: normalizes the rating values
    val itemPowSqrt = userItem.map(x => (x._2,pow(x._3.toDouble,2.0))).reduceByKey(_+_).mapValues(x => sqrt(x))
    // join item -> ((user, rating), sqrt(sum of squares)) and map to user -> (item, normalized rating)
    val userItemSqrt = itemUser.join(itemPowSqrt).map(x =>{
      val item = x._1
      val sqrtRatings = x._2._2
      val user = x._2._1._1
      val rating = x._2._1._2
      (user,(item,rating / sqrtRatings))
    })
    // self-join on user to pair items rated by the same user; the product of normalized ratings scores the item-to-item relation
    val itemToItem = userItemSqrt.join(userItemSqrt).map(x =>{
      val item1 = x._2._1._1
      val rating1 = x._2._1._2
      val item2 = x._2._2._1
      val rating2 = x._2._2._2
      val score = rating1 * rating2
      if(item1 == item2){
        ((item1,item2),-1.0)
      }else{
        ((item1,item2),score)
      }
    })

    // sum the per-user contributions to get the final item-to-item score, then keep the topK per item
    itemToItem.reduceByKey(_+_).map(x => (x._1._1,(x._1._2,x._2))).groupByKey().foreach(x => {
      val sourceItem = x._1
      val topItem = x._2.toList.filter(_._2 > 0).sortWith(_._2 > _._2).take(topK)
      println(s"item = $sourceItem,topK relative item list:$topItem")
    })
    sc.stop()
  }

} 
Example 3
Source File: MathUtil.scala    From dbpedia-spotlight-model    with Apache License 2.0
package org.dbpedia.spotlight.util

import breeze.linalg.{DenseVector, Transpose}
import breeze.numerics.sqrt
import org.apache.commons.math.util.FastMath



object MathUtil {

  val LOGZERO = Double.NegativeInfinity

  def isLogZero(x: Double): Boolean = x.isNegInfinity

  def exp(x: Double): Double = {
    if (x.isNegInfinity)
      0.0
    else
      FastMath.exp(x)
  }

  def ln(x: Double): Double = {
    if(x == 0.0)
      LOGZERO
    else
      FastMath.log(x)
  }

  def lnsum(a: Double, b: Double): Double = {
    if(a.isNegInfinity || b.isNegInfinity) {
      if(a.isNegInfinity)
        b
      else
        a
    } else {
      if(a > b)
        a + ln(1 + FastMath.exp(b-a))
      else
        b + ln(1 + FastMath.exp(a-b))
    }
  }

  def lnsum(seq: TraversableOnce[Double]): Double = {
    seq.foldLeft(MathUtil.ln(0.0))(MathUtil.lnsum)
  }

  def lnproduct(seq: TraversableOnce[Double]): Double = {
    seq.foldLeft(MathUtil.ln(1.0))(MathUtil.lnproduct)
  }

  def lnproduct(a: Double, b: Double): Double = {
    if (a.isNegInfinity || b.isNegInfinity)
      LOGZERO
    else
      a + b
  }

  def magnitude(vector: Transpose[DenseVector[Double]]): Double = {
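    // Euclidean norm: sqrt of the dot product of the row vector with itself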
    sqrt(vector * vector.t)
  }
  def magnitude(vector: Transpose[DenseVector[Float]]): Float = {
    sqrt(vector * vector.t)
  }

  def cosineSimilarity(vector1: Transpose[DenseVector[Double]], vector2: Transpose[DenseVector[Double]]): Double = {
    (vector1 * vector2.t) / (magnitude(vector1) * magnitude(vector2))
  }
  def cosineSimilarity(vector1: Transpose[DenseVector[Float]], vector2: Transpose[DenseVector[Float]]): Float = {
    (vector1 * vector2.t) / (magnitude(vector1) * magnitude(vector2))
  }
} 
Example 4
Source File: StandardScaler.scala    From keystone    with Apache License 2.0
package keystoneml.nodes.stats

import breeze.linalg.DenseVector
import breeze.numerics.sqrt
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
import org.apache.spark.rdd.RDD
import keystoneml.utils.MLlibUtils
import keystoneml.workflow.{Transformer, Estimator}


// The enclosing class header is omitted from this excerpt; it is declared along these lines,
// with normalizeStdDev and eps as constructor parameters:
class StandardScaler(normalizeStdDev: Boolean = true, eps: Double = 1E-12)
  extends Estimator[DenseVector[Double], DenseVector[Double]] {

  override def fit(data: RDD[DenseVector[Double]]): StandardScalerModel = {
    val summary = data.treeAggregate(new MultivariateOnlineSummarizer)(
      (aggregator, data) => aggregator.add(MLlibUtils.breezeVectorToMLlib(data)),
      (aggregator1, aggregator2) => aggregator1.merge(aggregator2))
    if (normalizeStdDev) {
      new StandardScalerModel(
        MLlibUtils.mllibVectorToDenseBreeze(summary.mean),
        Some(sqrt(MLlibUtils.mllibVectorToDenseBreeze(summary.variance))
            .map(r => if (r.isNaN | r.isInfinite | math.abs(r) < eps) 1.0 else r)))
    } else {
      new StandardScalerModel(
        MLlibUtils.mllibVectorToDenseBreeze(summary.mean),
        None)
    }
  }
} 
Example 5
Source File: Scaling.scala    From spark-gp    with Apache License 2.0
package org.apache.spark.ml.commons.util

import breeze.linalg.DenseVector
import breeze.numerics.sqrt
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.rdd.RDD

private[ml] trait Scaling {
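  // Standardizes features to zero mean and unit variance; zero-variance features are left unscaled.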
  def scale(data: RDD[LabeledPoint]) = {
    val x = data.map(x => DenseVector(x.features.toArray)).cache()
    val y = data.map(_.label)
    val n = x.count().toDouble
    val mean = x.reduce(_ + _) / n
    val centered = x.map(_ - mean).cache()
    val variance = centered.map(xx => xx *:* xx).reduce(_ + _) / n
    x.unpersist()
    val varianceNoZeroes = variance.map(v => if (v > 0d) v else 1d)
    val scaled = centered.map(_ /:/ sqrt(varianceNoZeroes)).map(_.toArray).map(Vectors.dense).zip(y).map {
      case(f, y) => LabeledPoint(y, f)
    }.cache()
    if (scaled.count() > 0) // ensure scaled is materialized
      centered.unpersist()
    scaled
  }
} 
Example 6
Source File: IntegratorTest.scala    From spark-gp    with Apache License 2.0
package org.apache.spark.ml.commons.util

import breeze.numerics.{abs, sigmoid, sqrt}
import breeze.stats.distributions.{Gaussian, RandBasis}
import org.apache.commons.math3.random.MersenneTwister
import org.scalatest.FunSuite


class IntegratorTest extends FunSuite {

  test("testExpectedOfFunctionOfNormal") {
    val f = (x: Double) => sigmoid(x)
    val integrator = new Integrator(100)
    val mean = 0.5
    val variance = 3
    val sd = sqrt(variance)

    val testResult = integrator.expectedOfFunctionOfNormal(mean, variance, f)

    val gg = new Gaussian(mean, sd)(new RandBasis(new MersenneTwister()))
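    // Monte Carlo reference: estimate E[f(X)] by averaging f over samples from the same Gaussian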
    val mcIters = 100000
    val values = gg.sample(mcIters).map(f)
    val mcResult = values.sum / mcIters
    val mcSD = sqrt(values.map(_ - mcResult).map(x => x * x).sum / mcIters) / sqrt(mcIters)
    assert(abs(mcResult - testResult) < 3 * mcSD)
  }

} 
Example 7
Source File: WaldTest.scala    From seqspark    with Apache License 2.0
package org.dizhang.seqspark.stat

import breeze.linalg.{DenseMatrix, DenseVector, diag, inv}
import breeze.numerics.sqrt
import breeze.stats.distributions.StudentsT
import org.dizhang.seqspark.stat.HypoTest.NullModel
import org.dizhang.seqspark.stat.HypoTest.NullModel._


trait WaldTest {
  def nm: NullModel
  def x: DenseVector[Double]
  def reg: Regression = {
    nm match {
      case Simple(y, b) =>
        if (b)
          LogisticRegression(y, x.toDenseMatrix.t)
        else
          LinearRegression(y, x.toDenseMatrix.t)
      case Mutiple(y, c, b) =>
        if (b)
          LogisticRegression(y, DenseMatrix.horzcat(x.toDenseMatrix.t, c))
        else
          LinearRegression(y, DenseMatrix.horzcat(x.toDenseMatrix.t, c))
      case Fitted(y, _, xs, _, _, b) =>
        if (b)
          LogisticRegression(y, DenseMatrix.horzcat(x.toDenseMatrix.t, xs(::, 1 until xs.cols)))
        else
          LinearRegression(y, DenseMatrix.horzcat(x.toDenseMatrix.t, xs(::, 1 until xs.cols)))
    }
  }
  def beta: DenseVector[Double] = reg.coefficients
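  // standard errors of the coefficients: square roots of the diagonal of the inverse Fisher information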
  def std: DenseVector[Double] = {
    sqrt(diag(inv(reg.information)))
  }
  def dof: Int = nm.dof - 1
  def t: DenseVector[Double] = beta /:/ std
  def pValue(oneSided: Boolean = true): DenseVector[Double] = {
    val dis = new StudentsT(dof)
    if (oneSided) {
      t.map(c => 1.0 - dis.cdf(c))
    } else {
      t.map(c => (1.0 - dis.cdf(math.abs(c))) * 2.0)
    }
  }
}

object WaldTest {

  def apply(nm: NullModel, x: DenseVector[Double]): WaldTest = {
    Default(nm, x)
  }

  case class Default(nm: NullModel, x: DenseVector[Double]) extends WaldTest

} 
Example 8
Source File: GPBasisFuncRegressionModel.scala    From DynaML    with Apache License 2.0
package io.github.mandar2812.dynaml.models.gp

import breeze.linalg.{DenseMatrix, DenseVector, cholesky, trace, inv}
import breeze.numerics.{log, sqrt}
import io.github.mandar2812.dynaml.algebra._
import io.github.mandar2812.dynaml.analysis._
import io.github.mandar2812.dynaml.algebra.PartitionedMatrixOps._
import io.github.mandar2812.dynaml.algebra.PartitionedMatrixSolvers._
import io.github.mandar2812.dynaml.kernels._
import io.github.mandar2812.dynaml.models.{ContinuousProcessModel, SecondOrderProcessModel}
import io.github.mandar2812.dynaml.optimization.GloballyOptWithGrad
import io.github.mandar2812.dynaml.pipes.{DataPipe, DataPipe2}
import io.github.mandar2812.dynaml.probability.{MultGaussianPRV, MultGaussianRV}
import org.apache.log4j.Logger

import scala.reflect.ClassTag


abstract class GPBasisFuncRegressionModel[T, I: ClassTag](
  cov: LocalScalarKernel[I], n: LocalScalarKernel[I],
  data: T, num: Int, basisFunc: DataPipe[I, DenseVector[Double]],
  basis_param_prior: MultGaussianRV) extends AbstractGPRegressionModel[T, I](
  cov, n, data, num) {

  val MultGaussianRV(b, covB) = basis_param_prior

  implicit val vf = VectorField(b.length)

  private lazy val lowB = cholesky(covB)

  private lazy val covBsolveb = lowB.t \ (lowB \ b)

  private lazy val h: PartitionedMatrix = PartitionedMatrix.horzcat(_blockSize)(trainingData.map(basisFunc(_)):_*)

  override val mean: DataPipe[I, Double] = basisFunc > DataPipe((h: DenseVector[Double]) => h.t * b)

  private val basisFeatureMap: DataPipe[I, DenseVector[Double]] = basisFunc > DataPipe((x: DenseVector[Double]) => lowB*x)

  val feature_map_cov = CovarianceFunction(basisFunc > DataPipe((x: DenseVector[Double]) => lowB*x))

  override protected def getTrainKernelMatrix[U <: Seq[I]] = {
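    // training covariance = base kernel + basis-function covariance induced by the coefficient prior + observation noise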
    SVMKernel.buildPartitionedKernelMatrix(trainingData,
      trainingData.length, _blockSize, _blockSize,
      (x: I, y: I) => {covariance.evaluate(x, y) + feature_map_cov.evaluate(x, y) + noiseModel.evaluate(x, y)}
    )
  }

  override protected def getCrossKernelMatrix[U <: Seq[I]](test: U) =
    SVMKernel.crossPartitonedKernelMatrix(
      trainingData, test, _blockSize, _blockSize,
      (x: I, y: I) => {covariance.evaluate(x, y) + feature_map_cov.evaluate(x, y)}
    )

  override protected def getTestKernelMatrix[U <: Seq[I]](test: U) =
    SVMKernel.buildPartitionedKernelMatrix(
      test, test.length.toLong,
      _blockSize, _blockSize,
      (x: I, y: I) => {covariance.evaluate(x, y) + feature_map_cov.evaluate(x, y)}
    )


} 
Example 9
Source File: LaplacePosteriorMode.scala    From DynaML    with Apache License 2.0
package io.github.mandar2812.dynaml.optimization

import breeze.linalg.{DenseMatrix, DenseVector, cholesky, inv}
import breeze.numerics.sqrt
import io.github.mandar2812.dynaml.DynaMLPipe._
import io.github.mandar2812.dynaml.pipes.DataPipe
import io.github.mandar2812.dynaml.probability.Likelihood


  override def optimize(nPoints: Long,
                        ParamOutEdges: (DenseMatrix[Double], DenseVector[Double]),
                        initialP: DenseVector[Double]): DenseVector[Double] =
    LaplacePosteriorMode.run(
      nPoints, ParamOutEdges,
      this.likelihood, initialP,
      this.numIterations,
      identityPipe[(DenseMatrix[Double], DenseVector[Double])])
}

object LaplacePosteriorMode {

  def run[T](nPoints: Long, data: T,
             likelihood: Likelihood[
               DenseVector[Double], DenseVector[Double], DenseMatrix[Double],
               (DenseVector[Double], DenseVector[Double])],
             initialP: DenseVector[Double], numIterations: Int,
             transform: DataPipe[T, (DenseMatrix[Double], DenseVector[Double])]): DenseVector[Double] = {

    val (kMat, y) = transform(data)
    var mode = initialP

    var b = DenseVector.zeros[Double](y.length)
    var a = DenseVector.zeros[Double](y.length)

    val id = DenseMatrix.eye[Double](y.length)

    (1 to numIterations).foreach{ iter =>
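      // Newton-style update of the Laplace-approximation posterior mode using the likelihood gradient and Hessian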
      val wMat = likelihood.hessian(y, mode) * -1.0
      val wMatsq = sqrt(wMat)

      val L = cholesky(id + wMatsq*kMat*wMatsq)
      b = wMat*mode + likelihood.gradient(y, mode)
      val buff1 = wMatsq*kMat*b
      val buff2 = inv(L)*buff1

      a = b - inv(wMatsq*L.t)*buff2
      mode = kMat*a
    }

    mode

  }
} 
Example 10
Source File: BaselIlluminationPrior.scala    From parametric-face-image-generator    with Apache License 2.0
package faces.utils

import java.io.File

import breeze.numerics.sqrt
import scalismo.faces.io.RenderParameterIO
import scalismo.faces.parameters.SphericalHarmonicsLight
import scalismo.geometry.Vector3D
import scalismo.statisticalmodel.MultivariateNormalDistribution
import scalismo.utils.Random

case class BaselIlluminationPrior(dir: String, nocolor: Boolean = false, setEnergy: Boolean = false, energy: Double = 6.33){
  require(new File(dir).exists(), "Illumination Prior path does not exist")

  // search all parameter files to estimate illumination
  lazy val files = {
    val listFiles = new File(dir).listFiles.filter(_.getName.endsWith(".rps")).toIndexedSeq
    listFiles
  }

  // load files holding illumination parameters (empirical distribution)
  lazy val allIllumination = files.map(f => {
    val rps = RenderParameterIO.read(f).get
    rps.environmentMap
  })

  // load spherical harmonics into a vectorized representation
  lazy val allIlluminationData = allIllumination.map(i => i.toBreezeVector)

  // estimate a multivariate normal distribution from the illumination data
  lazy val mnd = MultivariateNormalDistribution.estimateFromData(allIlluminationData)

  // generates a random illumination condition following the empirical distribution of the Basel Illumination Prior 2017 data
  private def rndEmpirical (implicit rnd: Random) : SphericalHarmonicsLight = {
    allIllumination(rnd.scalaRandom.nextInt(allIllumination.length))
  }

  // generates a random illumination condition following a multivariate normal distribution fitted to the Basel Illumination Prior 2017 data
  private def rndMND (implicit rnd: Random): SphericalHarmonicsLight = {
    val sample = mnd.sample()
    SphericalHarmonicsLight.fromBreezeVector(sample)
  }

  // choose an illumination based on the configured distribution
  def rnd(illumination: String)(implicit rnd: Random): SphericalHarmonicsLight = {
    val colored = {
      val random = illumination match {
        case "staticFrontal" => SphericalHarmonicsLight.frontal.withNumberOfBands(2)
        case "empirical" => rndEmpirical
        case "multiVariateNormal" => rndMND
        case _ => throw new Exception("please choose a valid illumination setting")
      }

      // fixes the energy of the illumination if setEnergy is set to true
      if (setEnergy) {
        val factor = energy / random.coefficients.map(_.norm2).sum
        SphericalHarmonicsLight(random.coefficients.map(c => c * sqrt(factor)))
      } else {
        random
      }
    }

    // removes all color from illumination and takes average intensity over color channels per coefficient instead
    if (nocolor){
      val intensities = colored.coefficients.map(f => {
        val mean = f.toArray.sum / 3.0
        Vector3D(mean, mean, mean)
      })
      SphericalHarmonicsLight(intensities)
    }
    else colored
  }

} 
Example 11
Source File: UCB1.scala    From banditsbook-scala    with MIT License
package com.github.everpeace.banditsbook.algorithm.ucb

import breeze.numerics.sqrt
import breeze.stats.distributions.{Rand, RandBasis}
import breeze.storage.Zero
import com.github.everpeace.banditsbook.algorithm.Algorithm
import com.github.everpeace.banditsbook.arm.Arm

import scala.collection.immutable.Seq
import scala.reflect.ClassTag


object UCB1 {

  import breeze.linalg._
  import Vector._

  case class State(counts: Vector[Int], expectations: Vector[Double])

  def Algorithm(implicit zeroReward: Zero[Double], zeroInt: Zero[Int], tag: ClassTag[Double], rand: RandBasis = Rand)
  = {
    new Algorithm[Double, State] {

      override def initialState(arms: Seq[Arm[Double]]): State = State(
        zeros(arms.size), zeros(arms.size)
      )

      override def selectArm(arms: Seq[Arm[Double]], state: State): Int = {
        val counts = state.counts
        val expectations = state.expectations
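        // UCB1 score: empirical mean plus exploration bonus sqrt(2 * ln(total plays) / plays(arm))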
        val step = sum(counts)
        val factor = fill(counts.size)(2 * scala.math.log(step))
        val bonus = sqrt(factor / counts.map(_.toDouble))
        val score = expectations + bonus
        argmax(score)
      }

      override def updateState(arms: Seq[Arm[Double]], state: State, chosen: Int, reward: Double): State = {
        val counts = state.counts
        val expectations = state.expectations
        val count = counts(chosen) + 1
        counts.update(chosen, count)

        val expectation = (((count - 1) / count.toDouble) * expectations(chosen)) + ((1 / count.toDouble) * reward)
        expectations.update(chosen, expectation)

        state.copy(counts = counts, expectations = expectations)
      }
    }
  }
} 
Example 12
Source File: ExpectedImprovement.scala    From pravda-ml    with Apache License 2.0
package com.linkedin.photon.ml.hyperparameter.criteria

import breeze.linalg.DenseVector
import breeze.numerics.sqrt
import breeze.stats.distributions.Gaussian
import com.linkedin.photon.ml.hyperparameter.estimators.PredictionTransformation


// The enclosing class header is omitted from this excerpt; it is declared along these lines,
// with bestEvaluation the best evaluation observed so far and standardNormal a standard Gaussian
// used for the CDF/PDF terms below:
class ExpectedImprovement(bestEvaluation: Double) extends PredictionTransformation {

  private val standardNormal = new Gaussian(0, 1)

  def apply(
      predictiveMeans: DenseVector[Double],
      predictiveVariances: DenseVector[Double]): DenseVector[Double] = {

    val std = sqrt(predictiveVariances)

    // PBO Eq. 1
    val gamma = - (predictiveMeans - bestEvaluation) / std

    // Eq. 2
    std :* ((gamma :* gamma.map(standardNormal.cdf)) + gamma.map(standardNormal.pdf))
  }
} 
Example 13
Source File: Norms.scala    From doddle-model    with Apache License 2.0
package io.picnicml.doddlemodel.preprocessing

import breeze.linalg.{Axis, max, sum}
import breeze.numerics.{abs, pow, sqrt}
import io.picnicml.doddlemodel.data.{Features, RealVector}

object Norms {

  sealed trait Norm {
    def apply(x: Features): RealVector
  }

  final case object L1Norm extends Norm {
    override def apply(x: Features): RealVector = sum(abs(x), Axis._1)
  }

  final case object L2Norm extends Norm {
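    // Euclidean (L2) norm of each row: sqrt of the sum of squared feature values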
    override def apply(x: Features): RealVector = sqrt(sum(pow(x, 2), Axis._1))
  }

  final case object MaxNorm extends Norm {
    override def apply(x: Features): RealVector = max(abs(x), Axis._1)
  }
}