scala.language.reflectiveCalls Scala Examples

The following examples show how to use scala.language.reflectiveCalls. The header above each example links to the original project and source file.
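For context, scala.language.reflectiveCalls enables method calls on structural (refinement) types, which Scala dispatches through runtime reflection; without the import (or the -language:reflectiveCalls compiler option) such calls emit a feature warning. The wrappers in the examples below rely on this when they address Spark parameters through refinement types such as ml.param.Params { val inputCol: ml.param.Param[String] }. The following minimal, self-contained sketch illustrates the mechanism; the names are illustrative and do not come from any of the projects below.

import scala.language.reflectiveCalls

object ReflectiveCallsDemo {

  // A structural (refinement) type: any object exposing close() conforms to it.
  type Closeable = { def close(): Unit }

  // Invoking close() on a value of a structural type is a reflective call,
  // hence the language import above.
  def closeQuietly(resource: Closeable): Unit =
    try resource.close() catch { case _: Exception => () }

  def main(args: Array[String]): Unit = {
    // An anonymous object that happens to have a matching close() method.
    val resource = new { def close(): Unit = println("closed") }
    closeQuietly(resource)
  }
}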
Example 1
Source File: HasRawPredictionColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasRawPredictionColumnParam extends Params {

  val rawPredictionColumn =
    new SingleColumnSelectorParamWrapper[
        ml.param.Params { val rawPredictionCol: ml.param.Param[String] }](
      name = "raw prediction column",
      description = Some("The raw prediction (confidence) column."),
      sparkParamGetter = _.rawPredictionCol,
      portIndex = 0)
  setDefault(rawPredictionColumn, NameSingleColumnSelection("rawPrediction"))
} 
Example 2
Source File: HasSolverParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.param.{Param => SparkParam}

import io.deepsense.deeplang.params.choice.Choice
import io.deepsense.deeplang.params.wrappers.spark.ChoiceParamWrapper
import io.deepsense.deeplang.params.{Param, Params}

trait HasSolverParam extends Params {
  val solver =
    new ChoiceParamWrapper[
        ml.param.Params { val solver: SparkParam[String]}, SolverChoice.SolverOption](
      name = "solver",
      sparkParamGetter = _.solver,
      description =
        Some("""Sets the solver algorithm used for optimization.
          |Can be set to "l-bfgs", "normal" or "auto".
          |"l-bfgs" denotes Limited-memory BFGS which is a limited-memory quasi-Newton
          |optimization method. "normal" denotes Normal Equation. It is an analytical
          |solution to the linear regression problem.
          |The default value is "auto" which means that the solver algorithm is
          |selected automatically.""".stripMargin))

  setDefault(solver, SolverChoice.Auto())
}

object SolverChoice {

  sealed abstract class SolverOption(override val name: String) extends Choice {

    override val params: Array[Param[_]] = Array()

    override val choiceOrder: List[Class[_ <: SolverOption]] = List(
      classOf[Auto],
      classOf[Normal],
      classOf[LBFGS]
    )
  }

  case class Auto() extends SolverOption("auto")
  case class Normal() extends SolverOption("normal")
  case class LBFGS() extends SolverOption("l-bfgs")
} 
Example 3
Source File: HasMinTermsFrequencyParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.DoubleParamWrapper

trait HasMinTermsFrequencyParam
  extends HasInputColumn
  with HasOutputColumn {

  val minTF = new DoubleParamWrapper[ml.param.Params { val minTF: ml.param.DoubleParam }](
    name = "min term frequency",
    description =
      Some("""A filter to ignore rare words in a document. For each document, terms with
        |a frequency/count less than the given threshold are ignored. If this is an integer >= 1,
        |then this specifies a count (of times the term must appear in the document); if this is
        |a double in [0,1), then it specifies a fraction (out of the document's token count).
        |Note that the parameter is only used in transform of CountVectorizer model and does not
        |affect fitting.""".stripMargin),
    sparkParamGetter = _.minTF,
    RangeValidator(0.0, Double.MaxValue))
  setDefault(minTF, 1.0)

  def setMinTF(value: Double): this.type = {
    set(minTF, value)
  }
} 
Example 4
Source File: HasSubsamplingRateParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.{DoubleParamWrapper, IntParamWrapper}

trait HasSubsamplingRateParam extends Params {

  val subsamplingRate =
    new DoubleParamWrapper[ml.param.Params { val subsamplingRate: ml.param.DoubleParam }](
      name = "subsampling rate",
      description =
        Some("The fraction of the training data used for learning each decision tree."),
      sparkParamGetter = _.subsamplingRate,
      RangeValidator(0.0, 1.0, beginIncluded = false))
  setDefault(subsamplingRate, 1.0)

} 
Example 5
Source File: HasOptionalQuantilesColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.param.{Param => SparkParam}

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.choice.{Choice, ChoiceParam}
import io.deepsense.deeplang.params.wrappers.spark.{ParamsWithSparkWrappers, SingleColumnCreatorParamWrapper}

trait HasOptionalQuantilesColumnParam extends Params {

  val optionalQuantilesColumn =
    new ChoiceParam[OptionalQuantilesColumnChoice.QuantilesColumnOption](
      name = "use custom quantiles",
      description =
        Some("""Param for quantiles column name.
          |This column will output quantiles of corresponding
          |quantileProbabilities if it is set.""".stripMargin))

  setDefault(optionalQuantilesColumn, OptionalQuantilesColumnChoice.QuantilesColumnNoOption())
}

object OptionalQuantilesColumnChoice {

  sealed trait QuantilesColumnOption extends Choice with ParamsWithSparkWrappers {
    override val choiceOrder: List[Class[_ <: QuantilesColumnOption]] = List(
      classOf[QuantilesColumnNoOption],
      classOf[QuantilesColumnYesOption])
  }

  case class QuantilesColumnYesOption() extends QuantilesColumnOption {
    val quantilesColumn = new SingleColumnCreatorParamWrapper[
        ml.param.Params { val quantilesCol: SparkParam[String]}](
      name = "quantiles column",
      description = Some("The quantiles column for a model."),
      sparkParamGetter = _.quantilesCol)
    setDefault(quantilesColumn, "quantiles")

    override val name = "yes"
    override val params: Array[io.deepsense.deeplang.params.Param[_]] = Array(quantilesColumn)
  }

  case class QuantilesColumnNoOption() extends QuantilesColumnOption {
    override val name = "no"
    override val params: Array[io.deepsense.deeplang.params.Param[_]] = Array()
  }
} 
Example 6
Source File: HasInputColumn.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasInputColumn extends Params {

  val inputColumn = new SingleColumnSelectorParamWrapper[
      ml.param.Params { val inputCol: ml.param.Param[String] }](
    name = "input column",
    description = Some("The input column name."),
    sparkParamGetter = _.inputCol,
    portIndex = 0)

  def setInputColumn(value: String): this.type = {
    set(inputColumn, NameSingleColumnSelection(value))
  }
} 
Example 7
Source File: HasMaxBinsParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.RandomForestRegressor

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.{IntParamWrapper, LongParamWrapper}

trait HasMaxBinsParam extends Params {

  val maxBins = new IntParamWrapper[ml.param.Params { val maxBins: ml.param.IntParam }](
    name = "max bins",
    description = Some("The maximum number of bins used for discretizing continuous features " +
      "and for choosing how to split on features at each node. " +
      "More bins give higher granularity. " +
      "Must be >= 2 and >= number of categories in any categorical feature."),
    sparkParamGetter = _.maxBins,
    RangeValidator(2.0, Int.MaxValue, step = Some(1.0)))
  setDefault(maxBins, 32.0)

} 
Example 8
Source File: HasUserColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasUserColumnParam extends Params {

  val userColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val userCol: ml.param.Param[String] }](
      name = "user column",
      description = Some("The column for user ids."),
      sparkParamGetter = _.userCol,
      portIndex = 0)
  setDefault(userColumn, NameSingleColumnSelection("user"))
} 
Example 9
Source File: HasFeaturesColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.{NameSingleColumnSelection, SingleColumnSelection}
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasFeaturesColumnParam extends Params {

  val featuresColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val featuresCol: ml.param.Param[String] }](
      name = "features column",
      description = Some("The features column for model fitting."),
      sparkParamGetter = _.featuresCol,
      portIndex = 0)
  setDefault(featuresColumn, NameSingleColumnSelection("features"))

  def setFeaturesColumn(value: SingleColumnSelection): this.type = set(featuresColumn, value)
} 
Example 10
Source File: HasLabelColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.{SingleColumnSelection, NameSingleColumnSelection}
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasLabelColumnParam extends Params {

  val labelColumn =
    new SingleColumnSelectorParamWrapper[
        ml.param.Params { val labelCol: ml.param.Param[String] }](
      name = "label column",
      description = Some("The label column for model fitting."),
      sparkParamGetter = _.labelCol,
      portIndex = 0)
  setDefault(labelColumn, NameSingleColumnSelection("label"))

  def setLabelColumn(value: SingleColumnSelection): this.type = set(labelColumn, value)
} 
Example 11
Source File: HasElasticNetParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.DoubleParamWrapper

trait HasElasticNetParam extends Params {

  val elasticNetParam = new DoubleParamWrapper[
      ml.param.Params { val elasticNetParam: ml.param.DoubleParam }](
    name = "elastic net param",
    description = Some("The ElasticNet mixing parameter. " +
      "For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty."),
    sparkParamGetter = _.elasticNetParam,
    validator = RangeValidator(0.0, 1.0))
  setDefault(elasticNetParam, 0.0)
} 
Example 12
Source File: HasOptionalWeightColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.param.{Param => SparkParam}

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.choice.{Choice, ChoiceParam}
import io.deepsense.deeplang.params.selections.{NameSingleColumnSelection, SingleColumnSelection}
import io.deepsense.deeplang.params.wrappers.spark.{ParamsWithSparkWrappers, SingleColumnSelectorParamWrapper}

trait HasOptionalWeightColumnParam extends Params {

  val optionalWeightColumn =
    new ChoiceParam[OptionalWeightColumnChoice.WeightColumnOption](
      name = "use custom weights",
      description =
        Some("""Whether to over-/under-sample training instances according to the given weights in
          |the `weight column`. If the `weight column` is not specified,
          |all instances are treated equally with a weight 1.0.""".stripMargin))

  setDefault(optionalWeightColumn, OptionalWeightColumnChoice.WeightColumnNoOption())
}

object OptionalWeightColumnChoice {

  sealed trait WeightColumnOption
    extends Choice with ParamsWithSparkWrappers {
    override val choiceOrder: List[Class[_ <: WeightColumnOption]] = List(
      classOf[WeightColumnNoOption],
      classOf[WeightColumnYesOption])
  }

  case class WeightColumnYesOption() extends WeightColumnOption {
    val weightColumn = new SingleColumnSelectorParamWrapper[
      ml.param.Params { val weightCol: SparkParam[String]}](
      name = "weight column",
      description = Some("The weight column for a model."),
      sparkParamGetter = _.weightCol,
      portIndex = 0)
    setDefault(weightColumn, NameSingleColumnSelection("weight"))

    def getWeightColumn: SingleColumnSelection = $(weightColumn)
    def setWeightColumn(value: SingleColumnSelection): this.type = set(weightColumn -> value)

    override val name = "yes"
    override val params: Array[io.deepsense.deeplang.params.Param[_]] = Array(weightColumn)
  }

  case class WeightColumnNoOption() extends WeightColumnOption {
    override val name = "no"
    override val params: Array[io.deepsense.deeplang.params.Param[_]] = Array()
  }
} 
Example 13
Source File: HasMinInstancePerNodeParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.RandomForestRegressor

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.{IntParamWrapper, DoubleParamWrapper}

trait HasMinInstancePerNodeParam extends Params {

  val minInstancesPerNode =
    new IntParamWrapper[ml.param.Params { val minInstancesPerNode: ml.param.IntParam }](
      name = "min instances per node",
      description = Some("The minimum number of instances each child must have after split. " +
        "If a split causes the left or right child to have fewer instances than the parameter's " +
        "value, the split will be discarded as invalid."),
      sparkParamGetter = _.minInstancesPerNode,
      RangeValidator(1.0, Int.MaxValue, step = Some(1.0)))
  setDefault(minInstancesPerNode, 1.0)

} 
Example 14
Source File: HasOutputColumn.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnCreatorParamWrapper

trait HasOutputColumn extends Params {

  val outputColumn = new SingleColumnCreatorParamWrapper[
      ml.param.Params { val outputCol: ml.param.Param[String] }](
    name = "output column",
    description = Some("The output column name."),
    sparkParamGetter = _.outputCol)

  def setOutputColumn(value: String): this.type = {
    set(outputColumn, value)
  }
} 
Example 15
Source File: HasPredictionColumnSelectorParam.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasPredictionColumnSelectorParam extends Params {

  val predictionColumn =
    new SingleColumnSelectorParamWrapper[
        ml.param.Params { val predictionCol: ml.param.Param[String] }](
      name = "prediction column",
      description = Some("The prediction column."),
      sparkParamGetter = _.predictionCol,
      portIndex = 0)
  setDefault(predictionColumn, NameSingleColumnSelection("prediction"))
} 
Example 16
Source File: QuantileDiscretizerModel.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.models

import scala.language.reflectiveCalls

import org.apache.spark.ml.feature.{Bucketizer => SparkQuantileDiscretizerModel, QuantileDiscretizer => SparkQuantileDiscretizer}

import io.deepsense.deeplang.ExecutionContext
import io.deepsense.deeplang.doperables.SparkSingleColumnModelWrapper
import io.deepsense.deeplang.doperables.report.CommonTablesGenerators.SparkSummaryEntry
import io.deepsense.deeplang.doperables.report.{CommonTablesGenerators, Report}
import io.deepsense.deeplang.doperables.serialization.SerializableSparkModel
import io.deepsense.deeplang.params.Param

class QuantileDiscretizerModel
  extends SparkSingleColumnModelWrapper[SparkQuantileDiscretizerModel, SparkQuantileDiscretizer] {

  
  override protected def getSpecificParams: Array[Param[_]] = Array()

  override def report: Report = {
    val summary =
      List(
        SparkSummaryEntry(
          name = "splits",
          value = sparkModel.getSplits,
          description = "Split points for mapping continuous features into buckets."))

    super.report
      .withAdditionalTable(CommonTablesGenerators.modelSummary(summary))
  }

  override protected def loadModel(
      ctx: ExecutionContext,
      path: String): SerializableSparkModel[SparkQuantileDiscretizerModel] = {
    new SerializableSparkModel(SparkQuantileDiscretizerModel.load(path))
  }

} 
Example 17
Source File: PCAModel.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.models

import scala.language.reflectiveCalls

import org.apache.spark.ml.feature.{PCA => SparkPCA, PCAModel => SparkPCAModel}

import io.deepsense.deeplang.ExecutionContext
import io.deepsense.deeplang.doperables.SparkSingleColumnModelWrapper
import io.deepsense.deeplang.doperables.report.{CommonTablesGenerators, Report}
import io.deepsense.deeplang.doperables.serialization.SerializableSparkModel
import io.deepsense.deeplang.params.Param
import io.deepsense.sparkutils.ML

class PCAModel
  extends SparkSingleColumnModelWrapper[SparkPCAModel, SparkPCA] {

  override protected def getSpecificParams: Array[Param[_]] = Array()

  override def report: Report = {
    super.report
      .withAdditionalTable(CommonTablesGenerators.denseMatrix(
        name = "A Principal Components Matrix",
        description = "Each column is one principal component.",
        matrix = ML.ModelParams.pcFromPCAModel(sparkModel)))
  }

  override protected def loadModel(
      ctx: ExecutionContext,
      path: String): SerializableSparkModel[SparkPCAModel] = {
    new SerializableSparkModel(SparkPCAModel.load(path))
  }
} 
Example 18
Source File: GBTRegression.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.{GBTRegressionModel => SparkGBTRegressionModel, GBTRegressor => SparkGBTRegressor}

import io.deepsense.deeplang.doperables.SparkEstimatorWrapper
import io.deepsense.deeplang.doperables.spark.wrappers.models.GBTRegressionModel
import io.deepsense.deeplang.doperables.spark.wrappers.params.GBTParams
import io.deepsense.deeplang.doperables.spark.wrappers.params.common.HasRegressionImpurityParam
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.choice.Choice
import io.deepsense.deeplang.params.wrappers.spark.ChoiceParamWrapper

class GBTRegression
  extends SparkEstimatorWrapper[
    SparkGBTRegressionModel,
    SparkGBTRegressor,
    GBTRegressionModel]
  with GBTParams
  with HasRegressionImpurityParam {

  import GBTRegression._

  override lazy val maxIterationsDefault = 20.0

  val lossType = new ChoiceParamWrapper[
    ml.param.Params { val lossType: ml.param.Param[String] }, LossType](
    name = "loss function",
    description = Some("The loss function which GBT tries to minimize."),
    sparkParamGetter = _.lossType)
  setDefault(lossType, Squared())

  override val params: Array[Param[_]] = Array(
    impurity,
    lossType,
    maxBins,
    maxDepth,
    maxIterations,
    minInfoGain,
    minInstancesPerNode,
    seed,
    stepSize,
    subsamplingRate,
    labelColumn,
    featuresColumn,
    predictionColumn)
}

object GBTRegression {

  sealed abstract class LossType(override val name: String) extends Choice {
    override val params: Array[Param[_]] = Array()

    override val choiceOrder: List[Class[_ <: Choice]] = List(
      classOf[Squared],
      classOf[Absolute]
    )
  }
  case class Squared() extends LossType("squared")
  case class Absolute() extends LossType("absolute")

} 
Example 19
Source File: NaiveBayes.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.classification.{NaiveBayes => SparkNaiveBayes, NaiveBayesModel => SparkNaiveBayesModel}

import io.deepsense.deeplang.doperables.SparkEstimatorWrapper
import io.deepsense.deeplang.doperables.spark.wrappers.estimators.NaiveBayes.{ModelType, Multinomial}
import io.deepsense.deeplang.doperables.spark.wrappers.models.NaiveBayesModel
import io.deepsense.deeplang.doperables.spark.wrappers.params.common._
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.choice.Choice
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.{ChoiceParamWrapper, DoubleParamWrapper}

class NaiveBayes
  extends SparkEstimatorWrapper[
    SparkNaiveBayesModel,
    SparkNaiveBayes,
    NaiveBayesModel]
  with ProbabilisticClassifierParams
  with HasLabelColumnParam {

  val smoothing = new DoubleParamWrapper[ml.param.Params { val smoothing: ml.param.DoubleParam }](
    name = "smoothing",
    description = Some("The smoothing parameter."),
    sparkParamGetter = _.smoothing,
    validator = RangeValidator(begin = 0.0, end = Double.MaxValue))
  setDefault(smoothing, 1.0)

  val modelType =
    new ChoiceParamWrapper[ml.param.Params { val modelType: ml.param.Param[String] }, ModelType](
      name = "modelType",
      description = Some("The model type."),
      sparkParamGetter = _.modelType)
  setDefault(modelType, Multinomial())


  override val params: Array[Param[_]] = Array(
    smoothing,
    modelType,
    labelColumn,
    featuresColumn,
    probabilityColumn,
    rawPredictionColumn,
    predictionColumn)
}

object NaiveBayes {

  sealed abstract class ModelType(override val name: String) extends Choice {

    override val params: Array[Param[_]] = Array()

    override val choiceOrder: List[Class[_ <: Choice]] = List(
      classOf[Multinomial],
      classOf[Bernoulli]
    )
  }

  case class Multinomial() extends ModelType("multinomial")

  case class Bernoulli() extends ModelType("bernoulli")
} 
Example 20
Source File: StringIndexerEstimator.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml.feature.{StringIndexer => SparkStringIndexer, StringIndexerModel => SparkStringIndexerModel}

import io.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.SingleOrMultiColumnChoices.SingleColumnChoice
import io.deepsense.deeplang.doperables.spark.wrappers.models.{MultiColumnStringIndexerModel, SingleColumnStringIndexerModel, StringIndexerModel}
import io.deepsense.deeplang.doperables.{SparkMultiColumnEstimatorWrapper, SparkSingleColumnEstimatorWrapper}
import io.deepsense.deeplang.params.Param

class StringIndexerEstimator
  extends SparkMultiColumnEstimatorWrapper[
    SparkStringIndexerModel,
    SparkStringIndexer,
    StringIndexerModel,
    SingleColumnStringIndexerModel,
    SingleStringIndexer,
    MultiColumnStringIndexerModel] {

  setDefault(singleOrMultiChoiceParam, SingleColumnChoice())

  override def getSpecificParams: Array[Param[_]] = Array()
}

class SingleStringIndexer
  extends SparkSingleColumnEstimatorWrapper[
    SparkStringIndexerModel,
    SparkStringIndexer,
    SingleColumnStringIndexerModel] {

  override def getSpecificParams: Array[Param[_]] = Array()
} 
Example 21
Source File: CountVectorizerEstimator.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.feature.{CountVectorizer => SparkCountVectorizer, CountVectorizerModel => SparkCountVectorizerModel}

import io.deepsense.deeplang.doperables.SparkSingleColumnEstimatorWrapper
import io.deepsense.deeplang.doperables.spark.wrappers.models.CountVectorizerModel
import io.deepsense.deeplang.doperables.spark.wrappers.params.common._
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.{DoubleParamWrapper, IntParamWrapper}

class CountVectorizerEstimator
  extends SparkSingleColumnEstimatorWrapper[
    SparkCountVectorizerModel,
    SparkCountVectorizer,
    CountVectorizerModel]
  with HasMinTermsFrequencyParam {

  val minDF = new DoubleParamWrapper[ml.param.Params { val minDF: ml.param.DoubleParam }](
    name = "min different documents",
    description = Some("Specifies the minimum number of different documents " +
      "a term must appear in to be included in the vocabulary."),
    sparkParamGetter = _.minDF,
    RangeValidator(0.0, Double.MaxValue))
  setDefault(minDF, 1.0)

  val vocabSize = new IntParamWrapper[ml.param.Params { val vocabSize: ml.param.IntParam }](
    name = "max vocabulary size",
    description = Some("The maximum size of the vocabulary."),
    sparkParamGetter = _.vocabSize,
    RangeValidator(0.0, Int.MaxValue, beginIncluded = false, step = Some(1.0)))
  setDefault(vocabSize, (1 << 18).toDouble)

  override protected def getSpecificParams: Array[Param[_]] = Array(vocabSize, minDF, minTF)
} 
Example 22
Source File: ChiSqSelectorEstimator.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.feature.{ChiSqSelector => SparkChiSqSelector, ChiSqSelectorModel => SparkChiSqSelectorModel}

import io.deepsense.deeplang.doperables.SparkEstimatorWrapper
import io.deepsense.deeplang.doperables.spark.wrappers.models.ChiSqSelectorModel
import io.deepsense.deeplang.doperables.spark.wrappers.params.common.{HasFeaturesColumnParam, HasLabelColumnParam, HasOutputColumn}
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

class ChiSqSelectorEstimator
  extends SparkEstimatorWrapper[
    SparkChiSqSelectorModel,
    SparkChiSqSelector,
    ChiSqSelectorModel]
  with HasFeaturesColumnParam
  with HasOutputColumn
  with HasLabelColumnParam{

  val numTopFeatures = new IntParamWrapper[
    ml.param.Params { val numTopFeatures: ml.param.IntParam }](
    name = "num top features",
    description = Some("Number of features that selector will select, ordered by statistics value " +
      "descending. If the real number of features is lower, then this will select all " +
      "features."),
    sparkParamGetter = _.numTopFeatures,
    validator = RangeValidator(begin = 1.0, end = Int.MaxValue, step = Some(1.0)))
  setDefault(numTopFeatures -> 50)

  override val params: Array[Param[_]] = Array(
    numTopFeatures,
    featuresColumn,
    outputColumn,
    labelColumn)

  def setNumTopFeatures(value: Int): this.type = set(numTopFeatures -> value)
} 
Example 23
Source File: QuantileDiscretizerEstimator.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.feature.{Bucketizer => SparkQuantileDiscretizerModel, QuantileDiscretizer => SparkQuantileDiscretizer}

import io.deepsense.deeplang.doperables.SparkSingleColumnEstimatorWrapper
import io.deepsense.deeplang.doperables.spark.wrappers.models.QuantileDiscretizerModel
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

class QuantileDiscretizerEstimator
  extends SparkSingleColumnEstimatorWrapper[
    SparkQuantileDiscretizerModel,
    SparkQuantileDiscretizer,
    QuantileDiscretizerModel] {

  val numBuckets = new IntParamWrapper[ml.param.Params { val numBuckets: ml.param.IntParam }](
    name = "num buckets",
    description = Some("Maximum number of buckets (quantiles or categories) " +
      "into which the data points are grouped. Must be >= 2."),
    sparkParamGetter = _.numBuckets,
    RangeValidator(2.0, Int.MaxValue, step = Some(1.0)))
  setDefault(numBuckets, 2.0)

  override protected def getSpecificParams: Array[Param[_]] = Array(numBuckets)

  def setNumBuckets(value: Int): this.type = set(numBuckets -> value)
} 
Example 24
Source File: AFTSurvivalRegression.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.{AFTSurvivalRegression => SparkAFTSurvivalRegression, AFTSurvivalRegressionModel => SparkAFTSurvivalRegressionModel}

import io.deepsense.deeplang.doperables.SparkEstimatorWrapper
import io.deepsense.deeplang.doperables.spark.wrappers.models.AFTSurvivalRegressionModel
import io.deepsense.deeplang.doperables.spark.wrappers.params.AFTSurvivalRegressionParams
import io.deepsense.deeplang.doperables.spark.wrappers.params.common.{HasFitIntercept, HasLabelColumnParam, HasMaxIterationsParam, HasTolerance}
import io.deepsense.deeplang.params.Param
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

class AFTSurvivalRegression
  extends SparkEstimatorWrapper[
    SparkAFTSurvivalRegressionModel,
    SparkAFTSurvivalRegression,
    AFTSurvivalRegressionModel]
  with AFTSurvivalRegressionParams
  with HasLabelColumnParam
  with HasMaxIterationsParam
  with HasTolerance
  with HasFitIntercept {

  val censorColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val censorCol: ml.param.Param[String] }](
      name = "censor column",
      description = Some("""Param for censor column name.
                      |The value of this column could be 0 or 1.
                      |If the value is 1, it means the event has occurred i.e. uncensored;
                      |otherwise censored.""".stripMargin),
      sparkParamGetter = _.censorCol,
      portIndex = 0)
  setDefault(censorColumn, NameSingleColumnSelection("censor"))

  override val params: Array[Param[_]] = Array(
    fitIntercept,
    maxIterations,
    tolerance,
    labelColumn,
    censorColumn,
    featuresColumn,
    predictionColumn,
    quantileProbabilities,
    optionalQuantilesColumn)
} 
Example 25
Source File: EstimatorModelWrapperFixtures.scala    From seahorse-workflow-executor   with Apache License 2.0
package io.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.ml
import org.apache.spark.ml.param.{ParamMap, Param => SparkParam}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

import io.deepsense.deeplang.ExecutionContext
import io.deepsense.deeplang.doperables.report.Report
import io.deepsense.deeplang.doperables.serialization.SerializableSparkModel
import io.deepsense.deeplang.doperables.{SparkEstimatorWrapper, SparkModelWrapper}
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnCreatorParamWrapper
import io.deepsense.deeplang.params.{Param, Params}
import io.deepsense.sparkutils.ML

object EstimatorModelWrapperFixtures {

  class SimpleSparkModel private[EstimatorModelWrapperFixtures]()
    extends ML.Model[SimpleSparkModel] {

    def this(x: String) = this()

    override val uid: String = "modelId"

    val predictionCol = new SparkParam[String](uid, "name", "description")

    def setPredictionCol(value: String): this.type = set(predictionCol, value)

    override def copy(extra: ParamMap): this.type = defaultCopy(extra)

    override def transformDF(dataset: DataFrame): DataFrame = {
      dataset.selectExpr("*", "1 as " + $(predictionCol))
    }

    @DeveloperApi
    override def transformSchema(schema: StructType): StructType = ???
  }

  class SimpleSparkEstimator extends ML.Estimator[SimpleSparkModel] {

    def this(x: String) = this()

    override val uid: String = "estimatorId"

    val predictionCol = new SparkParam[String](uid, "name", "description")

    override def fitDF(dataset: DataFrame): SimpleSparkModel =
      new SimpleSparkModel().setPredictionCol($(predictionCol))

    override def copy(extra: ParamMap): ML.Estimator[SimpleSparkModel] = defaultCopy(extra)

    @DeveloperApi
    override def transformSchema(schema: StructType): StructType = {
      schema.add(StructField($(predictionCol), IntegerType, nullable = false))
    }
  }

  trait HasPredictionColumn extends Params {
    val predictionColumn = new SingleColumnCreatorParamWrapper[
        ml.param.Params { val predictionCol: SparkParam[String] }](
      "prediction column",
      None,
      _.predictionCol)
    setDefault(predictionColumn, "abcdefg")

    def getPredictionColumn(): String = $(predictionColumn)
    def setPredictionColumn(value: String): this.type = set(predictionColumn, value)
  }

  class SimpleSparkModelWrapper
    extends SparkModelWrapper[SimpleSparkModel, SimpleSparkEstimator]
    with HasPredictionColumn {

    override val params: Array[Param[_]] = Array(predictionColumn)
    override def report: Report = ???

    override protected def loadModel(
      ctx: ExecutionContext,
      path: String): SerializableSparkModel[SimpleSparkModel] = ???
  }

  class SimpleSparkEstimatorWrapper
    extends SparkEstimatorWrapper[SimpleSparkModel, SimpleSparkEstimator, SimpleSparkModelWrapper]
    with HasPredictionColumn {

    override val params: Array[Param[_]] = Array(predictionColumn)
    override def report: Report = ???
  }
} 
Example 26
Source File: Repair.scala    From flamy   with Apache License 2.0
package com.flaminem.flamy.commands

import com.flaminem.flamy.commands.utils.FlamySubcommand
import com.flaminem.flamy.conf.{Environment, FlamyContext, FlamyGlobalOptions}
import com.flaminem.flamy.exec.FlamyRunner
import com.flaminem.flamy.exec.hive.HiveTableFetcher
import com.flaminem.flamy.exec.utils.{Action, ActionRunner, ReturnStatus, ReturnSuccess}
import com.flaminem.flamy.model.ItemFilter
import com.flaminem.flamy.model.names.{ItemName, TableName}
import com.flaminem.flamy.utils.AutoClose
import org.rogach.scallop.{ScallopConf, ScallopOption, Subcommand}

import scala.language.reflectiveCalls


class Repair extends Subcommand("repair") with FlamySubcommand{

  val tables = new Subcommand("tables") {
    banner("Execute a msck repair table on every specified table. " +
      "This will automatically add to the metastore the partitions that exists on hdfs but not yet in the metastore.")
    val environment: ScallopOption[Environment] =
      opt(name="on", descr="Specifies environment to run on", required = true, noshort=true)
    val dryRun: ScallopOption[Boolean] =
      opt(name="dry", default=Some(false), descr="Perform a dry-run", required = false, noshort=true)
    val items: ScallopOption[List[ItemName]] =
      trailArg[List[ItemName]](default=Some(List()),required = false)
  }

  private class RepairTableAction(runner: FlamyRunner, tableName: TableName) extends Action{

    @throws(classOf[Exception])
    override def run(): Unit = {
      runner.runText(f"use ${tableName.schemaName} ; MSCK REPAIR TABLE ${tableName.name}")
    }

    override val name: String = tableName.fullName
    override val logPath: String = f"${tableName.schemaName}.db/${tableName.name}/REPAIR.hql"
  }

  private def repairTables(context: FlamyContext, items: ItemName*): Unit = {
    val itemFilter = new ItemFilter(items, acceptIfEmpty = true)
    val fetcher = HiveTableFetcher(context)
    val tables: Iterable[TableName] = fetcher.listTables(itemFilter).filterNot{_.isView}.filter{_.isPartitioned}.map{_.tableName}

    val actionRunner: ActionRunner = new ActionRunner(silentOnSuccess = false)
    for {
      flamyRunner: FlamyRunner <- AutoClose(FlamyRunner(context))
    } {
      val actions = tables.map{tableName => new RepairTableAction(flamyRunner, tableName)}
      actionRunner.run(actions)
    }
  }

  override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
    subCommands match {
      case (command@this.tables) :: Nil =>
        val context = new FlamyContext(globalOptions, command.environment.get)
        context.dryRun = command.dryRun()
        repairTables(context, command.items():_*)
      case _ => printHelp()
    }
    ReturnSuccess
  }


} 
Example 27
Source File: WRTypeTests.scala    From sigmastate-interpreter   with MIT License
package special.wrappers

import scalan.RType

import scala.language.reflectiveCalls

class WRTypeTests extends WrappersTests {

  lazy val ctx = new WrappersCtx
  import ctx._
  import Coll._
  import WRType._
  import EnvRep._
  import Liftables._

  test("invokeUnlifted") {
    val ty = RType[Int]
    check(ty, { env: EnvRep[WRType[Int]] => for { xs <- env } yield xs.name }, ty.name)
  }

  test("Implicit conversion from RType to Elem") {
    val eInt: Elem[Int] = RType.IntType
    eInt shouldBe IntElement

    val ePair: Elem[(Int, Coll[Byte])] = RType[(Int, SColl[Byte])]
    ePair shouldBe element[(Int, Coll[Byte])]
  }
} 
Example 28
Source File: WSpecialPredefTests.scala    From sigmastate-interpreter   with MIT License
package special.wrappers


import scala.language.reflectiveCalls
import scalan.RType

class WSpecialPredefTests extends WrappersTests {

  lazy val ctx = new WrappersCtx
  import ctx._
  import WSpecialPredef._
  import CCostedBuilder._
  import CostedBuilder._

  lazy val SPCM = WSpecialPredefCompanionMethods
  lazy val CCB = CostedBuilderMethods

  test("some") {
    val x: Ref[Int] = 10
    val opt = RWSpecialPredef.some(x)
    opt match {
      case SPCM.some(_x) => _x shouldBe x
      case _ => assert(false)
    }
  }

  test("costedValue") {
    val cost: Ref[Int] = 10
    val optCost = RWSpecialPredef.some(cost)
    val b: Ref[CostedBuilder] = RCCostedBuilder()
    val x: Ref[Long] = 1L
    val value = b.costedValue(x, optCost)
    value match {
      case CCB.costedValue(_b, _x, SPCM.some(_cost)) =>
        _b shouldBe b
        _x shouldBe x
        _cost shouldBe cost
      case _ => assert(false)
    }
  }

} 
Example 29
Source File: PulsarContinuousTest.scala    From pulsar-spark   with Apache License 2.0
package org.apache.spark.sql.pulsar

import java.util.concurrent.atomic.AtomicInteger

import scala.language.reflectiveCalls

import org.apache.spark.SparkContext
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd, SparkListenerTaskStart}
import org.apache.spark.sql.execution.streaming.continuous.ContinuousExecution
import org.apache.spark.sql.streaming.Trigger
import org.apache.spark.sql.test.TestSparkSession

trait PulsarContinuousTest extends PulsarSourceTest {

  override val defaultTrigger = Trigger.Continuous(1000)
  override val defaultUseV2Sink = true

  // We need more than the default local[2] to be able to schedule all partitions simultaneously.
  override protected def createSparkSession =
    new TestSparkSession(
      new SparkContext(
        "local[10]",
        "continuous-stream-test-sql-context",
        sparkConf.set("spark.sql.testkey", "true")))

  // Continuous processing tasks end asynchronously, so test that they actually end.
  private val tasksEndedListener = new SparkListener() {
    val activeTaskIdCount = new AtomicInteger(0)

    override def onTaskStart(start: SparkListenerTaskStart): Unit = {
      activeTaskIdCount.incrementAndGet()
    }

    override def onTaskEnd(end: SparkListenerTaskEnd): Unit = {
      activeTaskIdCount.decrementAndGet()
    }
  }

  override def beforeEach(): Unit = {
    super.beforeEach()
    spark.sparkContext.addSparkListener(tasksEndedListener)
  }

  override def afterEach(): Unit = {
    eventually(timeout(streamingTimeout)) {
      assert(tasksEndedListener.activeTaskIdCount.get() == 0)
    }
    spark.sparkContext.removeSparkListener(tasksEndedListener)
    super.afterEach()
  }

  test("ensure continuous stream is being used") {
    val query = spark.readStream
      .format("rate")
      .option("numPartitions", "1")
      .option("rowsPerSecond", "1")
      .load()

    testStream(query)(
      Execute(q => assert(q.isInstanceOf[ContinuousExecution]))
    )
  }
} 
Example 30
Source File: ProgGenIssues.scala    From lift   with MIT License
package prog_gen

import ir._
import ir.ast._
import ir.interpreter.Interpreter
import opencl.executor.{Eval, Execute, TestWithExecutor}
import opencl.ir._
import opencl.ir.pattern.{MapSeq, ReduceSeq, toGlobal}
import org.junit.Assert._
import org.junit._
import rewriting.{EnabledMappings, Lower}

import scala.language.reflectiveCalls

object ProgGenIssues extends TestWithExecutor

class ProgGenIssues{

  @Test
  def hlGenResultNotEqual1(): Unit = {

    val f = Eval("val add = UserFun(\"add\", Array(\"x\", \"y\"), \"\"\"|{ return x+y; }\"\"\".stripMargin, Seq(Float, Float), Float).setScalaFun (xs => xs.head.asInstanceOf[Float] + xs(1).asInstanceOf[Float])\nfun(Float, ArrayTypeWSWC(ArrayTypeWSWC(Float, 32), 32), ArrayTypeWSWC(Float, 32),(p_0, p_1, p_2) => FunCall(Map(fun((p_3) => FunCall(Reduce(fun((p_4, p_5) => FunCall(add, p_4, p_5))), FunCall(add, p_0, p_3), FunCall(Map(fun((p_6) => FunCall(add, p_6, p_6))), FunCall(Join(), p_1))))), FunCall(Map(fun((p_7) => FunCall(add, p_7, p_7))), p_2)))")
    val fs = Lower.mapCombinations(f,
      EnabledMappings(global0 = true, global01 = false, global10 = false, false, false, group0 = false, group01 = false, group10 = false))

    val lower = fs.head
    TypeChecker(lower)

    val Args = InputGenerator()(fs.head)
    val output_int = Interpreter(f).->[Vector[Vector[Float]]].runAndFlatten(Args:_*).toArray[Float]
    val (output_exe,_)= Execute(1,32)[Array[Float]](lower,Args:_*)
    assertArrayEquals(output_int, output_exe, 0.0f)
  }

  @Test
  def issue76(): Unit = {

    val f = fun(
      Float,
      ArrayTypeWSWC(Float,32),
      (p236,p116) =>{
        Map(fun((p200) =>
          add(p236,add(p236,p200))
        )) $ p116
      })
    val fs = Lower.mapCombinations(f,
      EnabledMappings(global0 = true, global01 = false, global10 = false, false, false, group0 = false, group01 = false, group10 = false))

    val Args = InputGenerator()(fs.head)

    val output_int = Interpreter(f).->[Vector[Float]].run(Args:_*).toArray[Float]
    val (output_exe,_)= Execute(1,1024)[Array[Float]](fs.head,Args:_*)
    assertArrayEquals(output_int, output_exe, 0.0f)
  }

  @Test
  def issue78(): Unit = {

    val f = fun(
      Float,
      ArrayTypeWSWC(Float,32),
      (p252,p174) =>
        toGlobal(MapSeq(fun((p30) =>
          add(p252,p30)
        )))(ReduceSeq(fun((p89,p156) =>
          add(p89,p156)
        ))(id $ p252,p174))
    )

    val args = InputGenerator()(f)

    val output_int = Interpreter(f).->[Vector[Float]].run(args:_*).toArray[Float]
    val (output_exe,_)= Execute(1,1024)[Array[Float]](f, args:_*)
    assertArrayEquals(output_int, output_exe, 0.0f)
  }
} 
Example 31
Source File: DynamoSchema.scala    From orders-aws   with Apache License 2.0
package works.weave.socks.spring.aws

import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient
import com.amazonaws.services.dynamodbv2.model._

import org.slf4j.LoggerFactory

import scala.collection.JavaConverters._
import scala.language.reflectiveCalls

import works.weave.socks.spring.Ops._

abstract class DynamoSchema(dynamoConnection : DynamoConfiguration) {

  val LOG = LoggerFactory.getLogger(getClass)

  def createMissing(client : AmazonDynamoDBClient) : Unit = {
    val tableNames = client.listTables().getTableNames.asScala.toSet

    schema { table =>
      val name = table.getTableName
      if (tableNames contains name) {
        LOG.info("Table '{}' present", name)
      } else {
        LOG.info("Table '{}' missing, creating...", name)
        client.createTable(table)
        LOG.info("Table '{}' created", name)
      }
    }
  }

  
  def pollWithTimeout(timeout : Int) = new {
    def until(f : => Boolean) : Boolean = {
      def loop(timeoutBudget : Int)(delayMillis : Int) : Boolean = {
        if (f) {
          true
        } else if (timeoutBudget <= 0) {
          false
        } else {
          Thread.sleep(Math.min(timeoutBudget, delayMillis))
          loop(timeoutBudget - delayMillis)(delayMillis * 2)
        }
      }
      loop(timeout)(10)
    }
  }

  def resetDestructively(client : AmazonDynamoDBClient) : Unit = {
    val tableNames = client.listTables().getTableNames.asScala.toSet

    schema { table =>
      val name = table.getTableName
      if (tableNames contains name) {
        LOG.info("Table '{}' present, destroying...", name)

        client.deleteTable(name)
        LOG.info("Awaiting deletion")
        pollWithTimeout(60000) until {
          try {
            client.describeTable(name)
            false
          } catch {
            case e : ResourceNotFoundException =>
              true
          }
        }
        //client.describeTable(name).table.tableStatus.

      }

      LOG.info("Table '{}' creating...", name)
      client.createTable(table)
      LOG.info("Table '{}' created", name)

    }
  }

  protected def schema(declare : CreateTableRequest => Any) : Unit

  val hash = KeyType.HASH
  val range = KeyType.RANGE

  final protected def keySchemaElement(name : String, keyType : KeyType) =
    new KeySchemaElement(name, keyType)

  final protected def attributeDefinition(name : String, scalarAttributeType : ScalarAttributeType) =
    new AttributeDefinition(name, scalarAttributeType)

  final protected def table(name : String,
    attributeDefinitions : Seq[AttributeDefinition],
    keySchema : Seq[KeySchemaElement],
    provisionedThrougput : ProvisionedThroughput) : CreateTableRequest = (new CreateTableRequest()
    after (_.setTableName(name))
    after (_.setAttributeDefinitions(attributeDefinitions.asJava))
    after (_.setKeySchema(keySchema.asJava))
    after (_.setProvisionedThroughput(provisionedThrougput)))

} 
Example 32
Source File: MultiParamSpec.scala    From typed-schema   with Apache License 2.0
package ru.tinkoff.tschema.akkaHttp

import akka.http.scaladsl.server.{Directives, MissingQueryParamRejection}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ru.tinkoff.tschema.akkaHttp.MultiParamSpec.{Page, route}
import ru.tinkoff.tschema.param.HttpParam
import shapeless.Witness

import scala.language.reflectiveCalls
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

class MultiParamSpec extends AnyFlatSpec with Matchers with ScalatestRouteTest {
  "Multi parameter case class" should "require first param" in {
    Get("/required") ~> route ~> check {
      rejections should contain(MissingQueryParamRejection("from"))
    }
  }

  it should "require second param" in {
    Get("/required?from=3") ~> route ~> check {
      rejections should contain(MissingQueryParamRejection("to"))
    }
  }

  it should "not require optional field" in {
    Get("/required?from=3&to=5") ~> route ~> check {
      responseAs[String] shouldBe Page(3, 5).toString
    }
  }

  it should "supply optional field" in {
    Get("/required?from=3&to=5&hmm=true") ~> route ~> check {
      responseAs[String] shouldBe Page(3, 5, Some(true)).toString
    }
  }

  it should "not require whole optional record" in {
    Get("/optional?from=3") ~> route ~> check {
      responseAs[String] shouldBe None.toString
    }
    Get("/optional?to=3") ~> route ~> check {
      responseAs[String] shouldBe None.toString
    }
    Get("/optional") ~> route ~> check {
      responseAs[String] shouldBe None.toString
    }
  }

  it should "supply optional record" in {
    Get("/optional?from=3&to=5&hmm=false") ~> route ~> check {
      responseAs[String] shouldBe Some(Page(3, 5, Some(false))).toString
    }
  }

  it should "supply partial optional record with optional fields" in {
    Get("/optional?from=3&to=5") ~> route ~> check {
      responseAs[String] shouldBe Some(Page(3, 5)).toString
    }
  }

}

object MultiParamSpec {
  val page = Witness.`"page"`
  final case class Page(from: Int, to: Long, hmm: Option[Boolean] = None)
  object Page {
    implicit val pageParam: HttpParam[Page] = HttpParam.generate
  }

  val api = {
    import ru.tinkoff.tschema.syntax._
    (operation("required") |> queryParam[Page]("page") |> complete[String]) <|>
      (operation("optional") |> queryParam[Option[Page]]("page") |> complete[String])
  }

  val handler = new {
    def required(page: Page): String         = page.toString
    def optional(page: Option[Page]): String = page.toString
  }

  val route = MkRoute(api)(handler)

  import Directives._
  val kek = Directives.parameter("kek".as[Option[String]])(os => complete(os))

  parameters(("raw".as[Boolean], "offset".as[Int], "pageSize".as[Int]))
} 
Example 33
Source File: InnerSuite.scala    From typed-schema   with Apache License 2.0
package ru.tinkoff.tschema.akkaHttp

import akka.http.scaladsl.marshalling.{Marshaller, Marshalling, ToResponseMarshaller}
import akka.http.scaladsl.model.{ContentTypes, HttpResponse}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ru.tinkoff.tschema.syntax._

import org.scalatest.flatspec.AsyncFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.language.reflectiveCalls

class InnerSuite extends AsyncFlatSpec with ScalatestRouteTest with Matchers {

  object impl {
    object first {
      def get: String           = "first"
      def post(message: String) = s"first $message"
    }
    val second = new {
      def get: String           = "second"
      def post(message: String) = s"second $message"
    }
  }

  implicit val unitAsPlainText: ToResponseMarshaller[Unit] =
    Marshaller.strict(_ => Marshalling.WithFixedContentType(ContentTypes.NoContentType, () => HttpResponse()))

  def api =
    (
      groupPrefix("first") |> ((
        opGet |> $$[String]
      ) <> (
        opPost |> queryParam[String]("message") |> $$[String]
      ))
    ) <> (
      groupPrefix("second") |> ((
        opGet |> $$[String]
      ) <> (
        opPost |> body[String]("message") |> $$[String]
      ))
    )

  val route = MkRoute(api)(impl)

  "first group" should "handle get" in Get("/first") ~> route ~> check {
    responseAs[String] shouldBe "first"
  }

  it should "handle post" in Post("/first?message=hello+oleg") ~> route ~> check {
    responseAs[String] shouldBe "first hello oleg"
  }

  "second group" should "handle get" in Get("/second") ~> route ~> check {
    responseAs[String] shouldBe "second"
  }

  it should "handle post" in Post("/second", "hello oleg") ~> route ~> check {
    responseAs[String] shouldBe "second hello oleg"
  }

} 
Example 34
Source File: ReplScioContext.scala    From scio   with Apache License 2.0
package com.spotify.scio.repl

import org.apache.beam.sdk.options.PipelineOptions
import com.spotify.scio.{ScioContext, ScioExecutionContext}

class ReplScioContext(options: PipelineOptions, artifacts: List[String])
    extends ScioContext(options, artifacts) {

  
  override private[scio] def requireNotClosed[T](body: => T): T = {
    require(
      !this.isClosed,
      "ScioContext has already been executed, use :newScio <[context-name] | sc> to create new context"
    )
    super.requireNotClosed(body)
  }

  private def createJar(): Unit = {
    import scala.language.reflectiveCalls
    this.getClass.getClassLoader
      .asInstanceOf[{ def createReplCodeJar: String }]
      .createReplCodeJar
    ()
  }
} 
Example 35
Source File: Scanner.scala    From spark-vector   with Apache License 2.0
package com.actian.spark_vector.datastream.reader

import scala.language.reflectiveCalls

import org.apache.spark.{ Partition, SparkContext, TaskContext }
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow

import com.actian.spark_vector.datastream.VectorEndpointConf


  @volatile private var it: RowReader = _
  
  override protected def getPartitions = (0 until readConf.size).map(idx => new Partition { def index = idx }).toArray

  override protected def getPreferredLocations(split: Partition) = Seq(readConf.vectorEndpoints(split.index).host)
  
  override def compute(split: Partition, taskContext: TaskContext): Iterator[InternalRow] = {
    taskContext.addTaskCompletionListener { _ => closeAll() }
    taskContext.addTaskFailureListener { (_, e) => closeAll(Option(e)) }
    logDebug("Computing partition " + split.index)
    try {
      it = reader.read(split.index)
      it
    } catch { case e: Exception => 
      logDebug("Exception occurred when attempting to read from stream. If termination was abnormal an additional exception will be thrown.", e)
      Iterator.empty
    }
  }
  
  def touchDatastreams(parts: List[Int] = List[Int]()) {
    val untouched = List.range(0, readConf.size).diff(parts)
    untouched.foreach ( p =>
      try {
        reader.touch(p) //Need to ensure all the streams have been closed except the one used by this instance
        logDebug(s"Closed partition $p Vector transfer datastream")
      } catch {
        case e: Exception => logDebug("Exception while closing unused Vector transfer datastream " + e.toString())
      }
    )
  }
  
  def closeAll(failure: Option[Throwable] = None): Unit = {
    failure.foreach(logError("Failure during task completion, closing RowReader", _))
    if (it != null) {
      close(it, "RowReader")
      it = null
    }
  }
  
  private def close[T <: { def close() }](c: T, resourceName: String): Unit = if (c != null) {
    try { c.close } catch { case e: Exception => logWarning(s"Exception closing $resourceName", e) }
  }
  
} 
Example 36
Source File: Run.scala    From flamy   with Apache License 2.0 5 votes vote down vote up
package com.flaminem.flamy.commands

import com.flaminem.flamy.commands.utils.FlamySubcommand
import com.flaminem.flamy.conf.{Environment, FlamyContext, FlamyGlobalOptions}
import com.flaminem.flamy.exec.run.GraphRunner
import com.flaminem.flamy.exec.utils.{ReturnFailure, ReturnStatus}
import com.flaminem.flamy.model.ItemArgs
import com.flaminem.flamy.model.names.ItemName
import org.rogach.scallop.{ScallopConf, ScallopOption, Subcommand}

import scala.language.reflectiveCalls


class Run extends Subcommand("run") with FlamySubcommand {

  banner("Perform a run on the specified environment")

  private val environment: ScallopOption[Environment] =
    opt(name="on", default=None, descr="Specifies environment to run on.", required=false, noshort=true)

  private val dryRun: ScallopOption[Boolean] =
    opt(name="dry", default=Some(false), descr="Perform a dry-run", noshort=true)

  validateOpt(environment, dryRun) {
    case (None,Some(false)) => Left("Please specify an environment to run on (with the --on option), or use the --dry option to perform a local dry-run")
    case _ => Right(())
  }

  private val from: ScallopOption[List[ItemName]] =
    opt[List[ItemName]](name="from", default=Some(Nil), descr="start from the given schemas/tables.", noshort=true, argName = "items")

  private val to: ScallopOption[List[ItemName]] =
    opt[List[ItemName]](name="to", default=Some(Nil), descr="stop at the given schemas/tables.", noshort=true, argName = "items")
  codependent(from,to)

  private val items: ScallopOption[List[ItemName]] =
    trailArg[List[ItemName]](default=Some(Nil),required=false)

  lazy val itemArgs = ItemArgs(items(), from(), to())

  override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
    val context = new FlamyContext(globalOptions, this.environment.get)
    context.dryRun = this.dryRun()
    if (itemArgs.isEmpty) {
      System.err.println("Please specify items to run on")
      ReturnFailure
    }
    else {
      val graphRunner = GraphRunner(itemArgs, context)
      graphRunner.run()
    }
  }

} 
Example 37
Source File: Check.scala    From flamy   with Apache License 2.0 5 votes vote down vote up
package com.flaminem.flamy.commands

import com.flaminem.flamy.commands.utils.FlamySubcommand
import com.flaminem.flamy.conf.spark.ModelSparkContext
import com.flaminem.flamy.conf.{Environment, FlamyContext, FlamyGlobalOptions}
import com.flaminem.flamy.exec.FlamyRunner
import com.flaminem.flamy.exec.files.{FileRunner, ItemFileAction}
import com.flaminem.flamy.exec.hive.{HivePartitionFetcher, ModelHivePartitionFetcher}
import com.flaminem.flamy.exec.utils._
import com.flaminem.flamy.exec.utils.io.FlamyOutput
import com.flaminem.flamy.graph.TableGraph
import com.flaminem.flamy.model._
import com.flaminem.flamy.model.core.Model
import com.flaminem.flamy.model.files.FilePath
import com.flaminem.flamy.model.names.ItemName
import org.apache.spark.sql.SQLContext
import org.rogach.scallop.{ScallopConf, ScallopOption, Subcommand}

import scala.language.reflectiveCalls


      val runGraph: TableGraph = baseGraph.subGraph(items())

      val dryRunner: FlamyRunner = FlamyRunner(context)
      println("Creating schemas and tables ...")
      try {
        dryRunner.checkAll(baseGraph)
      }
      finally{
        //TODO: For some strange reason, closing the connection here will result in ClassNotFoundErrors for udfs in the RunActions...
        //      dryRunner.close()
      }
      FlamyOutput.out.info("Running Populates ...")
      dryRunner.populateAll(runGraph.model, context)
      dryRunner.close()
      ReturnStatus(success = dryRunner.getStats.getFailCount==0)
    }

  }

  override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
    subCommands match {
      case  (command: FlamySubcommand)::Nil => command.doCommand(globalOptions, Nil)
      case Nil => throw new IllegalArgumentException("A subcommand is expected")
      case _ =>
        printHelp()
        ReturnFailure
    }
  }


} 
Example 38
Source File: GatherInfo.scala    From flamy   with Apache License 2.0 5 votes vote down vote up
package com.flaminem.flamy.commands

import com.flaminem.flamy.commands.utils.FlamySubcommand
import com.flaminem.flamy.conf.{Environment, FlamyContext, FlamyGlobalOptions}
import com.flaminem.flamy.exec.hive.HivePartitionFetcher
import com.flaminem.flamy.exec.utils.{ReturnStatus, ReturnSuccess}
import com.flaminem.flamy.model.ItemFilter
import com.flaminem.flamy.model.names.ItemName
import com.flaminem.flamy.utils.AutoClose
import com.flaminem.flamy.utils.time.TimeUtils
import org.rogach.scallop.{ScallopConf, ScallopOption, Subcommand}

import scala.language.reflectiveCalls


class GatherInfo extends Subcommand("gather-info") with FlamySubcommand {

  banner("Gather all partitioning information on specified items (everything if no argument is given) and output this as csv on stdout.")

  val environment: ScallopOption[Environment] =
    opt(name = "on", descr = "Specifies environment to run on", required = true, noshort = true)

  val items: ScallopOption[List[ItemName]] =
    trailArg[List[ItemName]](default = Some(List()), required = false)

  override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
    val context = new FlamyContext(globalOptions, this.environment.get)
    for{
      fetcher: HivePartitionFetcher <- AutoClose(HivePartitionFetcher(context))
    } {
      val itemFilter = new ItemFilter(this.items(), true)
      for {
        tpInfo <- fetcher.listTableNames.filter{itemFilter}.map{fetcher.getTablePartitioningInfo}
        partition <- tpInfo.sortedTablePartitions
      } {
        println(
          Seq(
            tpInfo.tableName.schemaName,
            tpInfo.tableName.name,
            partition.partitionName,
            partition.getFileSize.getOrElse("\\N"),
            partition.getModificationTime(context, refresh = false).map {
              TimeUtils.timestampToUniversalTime
            }.getOrElse("\\N")
          ).mkString("\t")
        )
      }
    }
    ReturnSuccess
  }

} 
Example 39
Source File: WOptionTests.scala    From sigmastate-interpreter   with MIT License 5 votes vote down vote up
package special.wrappers

import scala.language.reflectiveCalls
import scalan.Library

class WOptionTests extends WrappersTests {

  test("invokeUnlifted") {
    val ctx = new WrappersCtx
    import ctx._
    import WOption._
    import EnvRep._

    val opt = Option(1)
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env } yield xs.get }, opt.get)
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env } yield xs.isEmpty }, opt.isEmpty)
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env } yield xs.isDefined }, opt.isDefined)

    val none: Option[Int] = None
    val th = () => 10
    check(none, { env: EnvRep[WOption[Int]] => for { xs <- env; thL <- lifted(th) } yield xs.getOrElse(thL) }, none.getOrElse(th()))

    val p = (x: Int) => x == 2
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env; pL <- lifted(p) } yield  xs.filter(pL) }, opt.filter(p))

    val inc = (x: Int) => x + 1
    check(opt, { env: EnvRep[WOption[Int]] =>
     for { xs <- env; thL <- lifted(th); incL <- lifted(inc) } yield xs.fold(thL, incL) },
     opt.fold(th())(inc))

    check(none, { env: EnvRep[WOption[Int]] =>
      for { xs <- env;  thL <- lifted(th); incL <- lifted(inc) } yield xs.fold(thL, incL) }, none.fold(th())(inc))
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env; incL <- lifted(inc) } yield xs.map(incL) }, opt.map(inc))

    val incOpt = (x: Int) => Option(x + 1)
    val incNone = (x: Int) => (None: Option[Int])
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env; incOptL <- lifted(incOpt)} yield xs.flatMap(incOptL) }, opt.flatMap(incOpt))
    check(opt, { env: EnvRep[WOption[Int]] => for { xs <- env; incNoneL <- lifted(incNone)} yield xs.flatMap(incNoneL) }, opt.flatMap(incNone))
  }
} 
Example 40
Source File: Count.scala    From flamy   with Apache License 2.0 5 votes vote down vote up
package com.flaminem.flamy.commands

import com.flaminem.flamy.commands.utils.FlamySubcommand
import com.flaminem.flamy.conf.{Environment, FlamyContext, FlamyGlobalOptions}
import com.flaminem.flamy.exec.hive.{HivePartitionFetcher, HiveTableFetcher, RemoteHiveRunner}
import com.flaminem.flamy.exec.utils.io.FlamyOutput
import com.flaminem.flamy.exec.utils.{ReturnFailure, ReturnStatus, ReturnSuccess}
import com.flaminem.flamy.model.ItemFilter
import com.flaminem.flamy.model.names.{ItemName, TableName, TablePartitionName}
import com.flaminem.flamy.utils.AutoClose
import com.flaminem.flamy.utils.prettyprint.Tabulator
import com.flaminem.flamy.utils.sql.hive.StreamedResultSet
import org.rogach.scallop.{ScallopConf, ScallopOption, Subcommand}

import scala.language.reflectiveCalls


class Count extends Subcommand("count") with FlamySubcommand {

  val tables = new Subcommand("tables") with FlamySubcommand {
    banner("Execute a select count(1) on every specified table.")
    val environment: ScallopOption[Environment] =
      opt(name="on", descr="Specifies environment to run on", required=false, noshort=true)
    val items: ScallopOption[List[ItemName]] =
      trailArg[List[ItemName]](default = Some(List()), required = false)

    override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
      val context = new FlamyContext(globalOptions, environment.get)
      val itemFilter = ItemFilter(items(), acceptIfEmpty = true)
      val fetcher = HiveTableFetcher(context)
      val tables: Iterable[TableName] = fetcher.listTableNames.filter{itemFilter}

      val hiveRunner: RemoteHiveRunner = new RemoteHiveRunner(context)
      try {
        for {
          tableName <- tables if !Thread.currentThread().isInterrupted
        } try {
          val res: StreamedResultSet = hiveRunner.executeQuery(f"SELECT COUNT(1) FROM $tableName")
          val row = res.next()
          FlamyOutput.out.success(f"ok: $tableName : ${row(0)}")
        } catch {
          case e: Throwable =>
            e.printStackTrace()
            FlamyOutput.err.failure(f"not ok: $tableName : ${e.getMessage}")
        }
      }
      finally{
        hiveRunner.close()
      }
      ReturnSuccess
    }

  }

  override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
    subCommands match {
      case  (command: FlamySubcommand)::Nil => command.doCommand(globalOptions, Nil)
      case Nil => throw new IllegalArgumentException("A subcommand is expected")
      case _ =>
        printHelp()
        ReturnFailure
    }
  }

} 
Example 41
Source File: WaitForPartition.scala    From flamy   with Apache License 2.0 5 votes vote down vote up
package com.flaminem.flamy.commands

import com.flaminem.flamy.commands.utils.FlamySubcommand
import com.flaminem.flamy.conf.{Environment, FlamyContext, FlamyGlobalOptions}
import com.flaminem.flamy.exec.hive.PartitionWaiter
import com.flaminem.flamy.exec.utils.ReturnStatus
import com.flaminem.flamy.model.names.ItemName
import com.flaminem.flamy.utils.AutoClose
import com.flaminem.flamy.utils.time.TimeUtils
import org.rogach.scallop.{ScallopConf, ScallopOption, Subcommand}

import scala.language.reflectiveCalls


class WaitForPartition extends Subcommand("wait-for-partition") with FlamySubcommand {

  banner("Wait for a partition to be created.")

  val environment: ScallopOption[Environment] =
    opt(name = "on", descr = "Specifies environment to run on", required = true, noshort = true)
  val timeout: ScallopOption[Long] =
    opt(
      name = "timeout",
      descr = "Number of seconds after which flamy will fail if the partitions still does not exist",
      default = Some(12 * 3600),
      noshort = true
    )
  val after: ScallopOption[String] =
    opt(
      name = "after",
      argName = "yyyy-MM-dd HH:mm:ss",
      descr = """Wait for the partition to be created or refreshed after this time. Expected format is "yyyy-MM-dd HH:mm:ss"""",
      default = None,
      noshort = true
    )
  val retryInterval: ScallopOption[Long] =
    opt(
      name = "retry-interval",
      argName = "INTERVAL",
      descr = "When a partition is not found, retry after INTERVAL seconds",
      default = Some(60),
      noshort = true
    )
  val items: ScallopOption[List[ItemName]] =
    trailArg[List[ItemName]](required = true)

  override def doCommand(globalOptions: FlamyGlobalOptions, subCommands: List[ScallopConf]): ReturnStatus = {
    val context = new FlamyContext(globalOptions, environment.get)
    for{
      waiter: PartitionWaiter <- AutoClose(new PartitionWaiter(context))
    } yield {
      waiter.waitForPartition(items(), timeout(), after.get.map{TimeUtils.universalTimeToTimeStamp}, retryInterval())
    }
  }

} 
Example 42
Source File: ManagedResource.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.sparkutils.readwritedataframe

import scala.language.reflectiveCalls

// TODO duplicated from io.deepsense.commons.resources.ManagedResource

object ManagedResource {
  def apply[T, Q](c: T {def close(): Unit})(f: (T) => Q): Q = {
    try {
      f(c)
    } finally {
      c.close()
    }
  }
} 
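A possible usage sketch for the loan helper above, assuming the ManagedResource object is on the classpath: the structural parameter type accepts any value with a parameterless close(): Unit, so a plain java.io reader (used here purely for illustration) qualifies without wrapping.

import scala.language.reflectiveCalls

import java.io.{BufferedReader, StringReader}

import io.deepsense.sparkutils.readwritedataframe.ManagedResource

object ManagedResourceUsage {
  def main(args: Array[String]): Unit = {
    val firstLine = ManagedResource(new BufferedReader(new StringReader("hello\nworld"))) { reader =>
      reader.readLine() // the reader is closed once this block returns
    }
    println(firstLine) // prints "hello"
  }
}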
Example 43
Source File: MultiColumnEstimator.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables

import scala.language.reflectiveCalls
import scala.reflect.runtime.universe.TypeTag

import org.apache.spark.sql.types.StructType

import io.deepsense.deeplang.ExecutionContext
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperables.multicolumn.HasSpecificParams
import io.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.MultiColumnInPlaceChoices.{MultiColumnNoInPlace, MultiColumnYesInPlace}
import io.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.SingleOrMultiColumnChoices.{MultiColumnChoice, SingleColumnChoice}
import io.deepsense.deeplang.doperables.multicolumn.SingleColumnParams.SingleTransformInPlaceChoices.{NoInPlaceChoice, YesInPlaceChoice}
import io.deepsense.deeplang.doperables.spark.wrappers.params.common.HasInputColumn
import io.deepsense.deeplang.params.IOColumnsParam
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection


  override private[deeplang] def _fit_infer(
      schema: Option[StructType]): T = {
    $(singleOrMultiChoiceParam) match {
      case single: SingleColumnChoice =>
        handleSingleColumnChoiceInfer(schema, single)
      case multi: MultiColumnChoice =>
        handleMultiColumnChoiceInfer(schema, multi)
    }
  }
} 
Example 44
Source File: DecisionTreeParams.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import io.deepsense.deeplang.doperables.spark.wrappers.params.common._
import io.deepsense.deeplang.params.Params

trait DecisionTreeParams
  extends Params
  with PredictorParams
  with HasCheckpointIntervalParam
  with HasSeedParam
  with HasMaxDepthParam
  with HasMaxBinsParam
  with HasMinInstancePerNodeParam
  with HasMinInfoGainParam
  with HasMaxMemoryInMBParam
  with HasCacheNodeIdsParam 
Example 45
Source File: GBTParams.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.doperables.spark.wrappers.params.common._
import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.{DoubleParamWrapper, IntParamWrapper}

trait GBTParams extends Params
  with PredictorParams
  with HasLabelColumnParam
  with HasMaxIterationsParam
  with HasSeedParam
  with HasStepSizeParam
  with HasMaxBinsParam
  with HasMaxDepthParam
  with HasMinInfoGainParam
  with HasMinInstancePerNodeParam
  with HasSubsamplingRateParam 
Example 46
Source File: AFTSurvivalRegressionParams.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.doperables.spark.wrappers.params.common._
import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.{ArrayLengthValidator, ComplexArrayValidator, RangeValidator}
import io.deepsense.deeplang.params.wrappers.spark.DoubleArrayParamWrapper

trait AFTSurvivalRegressionParams extends Params
    with PredictorParams
    with HasOptionalQuantilesColumnParam {

  val quantileProbabilities =
    new DoubleArrayParamWrapper[
        ml.param.Params { val quantileProbabilities: ml.param.DoubleArrayParam }](
      name = "quantile probabilities",
      description = Some("""Param for quantile probabilities array.
                      |Values of the quantile probabilities array should be in the range (0, 1)
                      |and the array should be non-empty.""".stripMargin),
      sparkParamGetter = _.quantileProbabilities,
      validator = ComplexArrayValidator(
        rangeValidator = RangeValidator(0, 1, beginIncluded = false, endIncluded = false),
        lengthValidator = ArrayLengthValidator.withAtLeast(1)
      ))
  setDefault(quantileProbabilities, Array(0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99))
} 
Example 47
Source File: Word2VecParams.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.doperables.spark.wrappers.params.common._
import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

trait Word2VecParams extends Params
  with HasMaxIterationsParam
  with HasStepSizeParam
  with HasSeedParam {

  val vectorSize = new IntParamWrapper[ml.param.Params { val vectorSize: ml.param.IntParam }](
    name = "vector size",
    description = Some("The dimension of codes after transforming from words."),
    sparkParamGetter = _.vectorSize,
    validator = RangeValidator.positiveIntegers)
  setDefault(vectorSize -> 100)

  val numPartitions = new IntParamWrapper[ml.param.Params { val numPartitions: ml.param.IntParam }](
    name = "num partitions",
    description = Some("The number of partitions for sentences of words."),
    sparkParamGetter = _.numPartitions,
    validator = RangeValidator.positiveIntegers)
  setDefault(numPartitions -> 1)

  val minCount = new IntParamWrapper[ml.param.Params { val minCount: ml.param.IntParam }](
    name = "min count",
    description = Some("The minimum number of occurences of a token to " +
      "be included in the model's vocabulary."),
    sparkParamGetter = _.minCount,
    validator = RangeValidator.positiveIntegers)
  setDefault(minCount -> 5)

  def setMinCount(value: Int): this.type = {
    set(minCount -> value)
  }

  def setVectorSize(value: Int): this.type = {
    set(vectorSize -> value)
  }
} 
Example 48
Source File: HasItemColumnParam.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
import io.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasItemColumnParam extends Params {

  val itemColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val itemCol: ml.param.Param[String] }](
      name = "item column",
      description = Some("The column for item ids."),
      sparkParamGetter = _.itemCol,
      portIndex = 0)
  setDefault(itemColumn, NameSingleColumnSelection("item"))
} 
Example 49
Source File: MinMaxParams.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.wrappers.spark.DoubleParamWrapper

trait MinMaxParams extends Params {

  val min = new DoubleParamWrapper[ml.param.Params { val min: ml.param.DoubleParam }](
    name = "min",
    description = Some("The lower bound after transformation, shared by all features."),
    sparkParamGetter = _.min)
  setDefault(min, 0.0)

  val max = new DoubleParamWrapper[ml.param.Params { val max: ml.param.DoubleParam }](
    name = "max",
    description = Some("The upper bound after transformation, shared by all features."),
    sparkParamGetter = _.max)
  setDefault(max, 1.0)

  def setMin(value: Double): this.type = {
    set(min, value)
  }

  def setMax(value: Double): this.type = {
    set(max, value)
  }
} 
Example 50
Source File: HasCheckpointIntervalParam.scala    From seahorse-workflow-executor   with Apache License 2.0 5 votes vote down vote up
package io.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import io.deepsense.deeplang.params.Params
import io.deepsense.deeplang.params.validators.RangeValidator
import io.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

trait HasCheckpointIntervalParam extends Params {

  val checkpointInterval = new IntParamWrapper[
      ml.param.Params { val checkpointInterval: ml.param.IntParam }](
    name = "checkpoint interval",
    description = Some("""The checkpoint interval. E.g. 10 means that the cache will get checkpointed
        |every 10 iterations.""".stripMargin),
    sparkParamGetter = _.checkpointInterval,
    validator = RangeValidator(begin = 1.0, end = Int.MaxValue, step = Some(1.0)))
  setDefault(checkpointInterval, 10.0)
} 
Example 51
Source File: XORShiftRandomSuite.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util.random

import scala.language.reflectiveCalls

import org.apache.commons.math3.stat.inference.ChiSquareTest
import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils.times

class XORShiftRandomSuite extends SparkFunSuite with Matchers {

  private def fixture = new {
    val seed = 1L
    val xorRand = new XORShiftRandom(seed)
    val hundMil = 1e8.toInt
  }

  
    val chiTest = new ChiSquareTest
    assert(chiTest.chiSquareTest(bins, 0.05) === false)
  }

  test ("XORShift with zero seed") {
    val random = new XORShiftRandom(0L)
    assert(random.nextInt() != 0)
  }

  test ("hashSeed has random bits throughout") {
    val totalBitCount = (0 until 10).map { seed =>
      val hashed = XORShiftRandom.hashSeed(seed)
      val bitCount = java.lang.Long.bitCount(hashed)
      // make sure we have roughly equal numbers of 0s and 1s.  Mostly just check that we
      // don't have all 0s or 1s in the high bits
      bitCount should be > 20
      bitCount should be < 44
      bitCount
    }.sum
    // and over all the seeds, very close to equal numbers of 0s & 1s
    totalBitCount should be > (32 * 10 - 30)
    totalBitCount should be < (32 * 10 + 30)
  }
} 
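The private fixture above is an instance of an anonymous refinement type, so each access to seed, xorRand or hundMil in the tests is a reflective call — which is why the import is needed. A minimal sketch of that fixture idiom, with illustrative members.

import scala.language.reflectiveCalls

object FixtureSketch {
  // `new { ... }` yields an anonymous refinement type; its members are
  // reachable only through reflective calls at the use site.
  private def fixture = new {
    val seed = 1L
    val label = "xorshift"
  }

  def main(args: Array[String]): Unit = {
    val f = fixture
    println(s"${f.label} seeded with ${f.seed}") // both accesses are reflective
  }
}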
Example 52
Source File: DiskBlockManagerSuite.scala    From drizzle-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 53
Source File: Main.scala    From ros_hadoop   with Apache License 2.0 5 votes vote down vote up
package de.valtech.foss

import scala.io.Source
import scala.collection.mutable.Map
import scala.collection.mutable.ListBuffer
import scala.collection.JavaConverters._
import Console.{GREEN, RED, RESET}
import scala.language.reflectiveCalls

import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.nio.channels.FileChannel.MapMode._
import java.nio.ByteOrder._
import java.nio.ByteBuffer

import de.valtech.foss.proto.RosbagIdxOuterClass.RosbagIdx

object Main extends App {
  def help() = {
    Console.err.printf(s"""
${RESET}${GREEN}Usage:
	--file <ros.bag> file to process
	--version print Rosbag version and exit
	--offset <offset> --number <records> Seek at offset < 1073741824 and read the specified number of records
${RESET}By default will just create the protobuf idx file needed for configuration.\n\n""")
    sys.exit(0)
  }

  val pargs = Map[String,AnyRef]()
  def process_cli(args: List[String]) :Boolean = args match {
    case Nil => true // parse success
    case "-v" :: rest => pargs += ("version" -> Some(true)); process_cli(rest)
    case "--version" :: rest => pargs += ("version" -> Some(true)); process_cli(rest)
    case "-f" :: x :: rest => pargs += ("file" -> x); process_cli(rest)
    case "--file" :: x :: rest => pargs += ("file" -> x); process_cli(rest)
    case "-n" :: x :: rest => pargs += ("number" -> Some(x.toInt)); process_cli(rest)
    case "--number" :: x :: rest => pargs += ("number" -> Some(x.toInt)); process_cli(rest)
    case "-o" :: x :: rest => pargs += ("offset" -> Some(x.toInt)); process_cli(rest)
    case "--offset" :: x :: rest => pargs += ("offset" -> Some(x.toInt)); process_cli(rest)
    case "-h" :: rest => help(); false
    case "--help" :: rest => help(); false
    case _ => Console.err.printf(s"${RESET}${RED}Unknown argument " + args.head); false
  }
  process_cli(args.toList)

  def use[T <: { def close() }]
    (resource: T)
    (code: T ⇒ Unit) =
    try
      code(resource)
    finally
      resource.close()

  pargs("file") match {
    case f:String => process()
    case _ => help()
  }

  def process(): Unit = {
    val fin = new File(pargs("file").asInstanceOf[String])
    use(new FileInputStream(fin)) { stream => {
      //printf("min: %s\n", Math.min(1073741824, fin.length) )
      val buffer = stream.getChannel.map(READ_ONLY, 0, Math.min(1073741824, fin.length)).order(LITTLE_ENDIAN)
      val p:RosbagParser = new RosbagParser(buffer)
      val version = p.read_version()
      val h = p.read_record().get
      if(pargs contains "version") {
        printf("%s\n%s\n\n", version, h)
        return
      }
      if(pargs contains "number"){
        buffer position pargs.getOrElse("offset",None).asInstanceOf[Option[Int]].getOrElse(0)
        for(i <- List.range(0,pargs("number").asInstanceOf[Option[Int]].getOrElse(0)))
          println(p.read_record)
        return
      }
      val idxpos = h.header.fields("index_pos").asInstanceOf[Long]
      //printf("idxpos: %s %s\n", idxpos, Math.min(1073741824, fin.length) )
      val b = stream.getChannel.map(READ_ONLY, idxpos, Math.min(1073741824, fin.length - idxpos)).order(LITTLE_ENDIAN)
      val pp:RosbagParser = new RosbagParser(b)
      val c = pp.read_connections(h.header, Nil)
      val chunk_idx = pp.read_chunk_infos(c)
      Console.err.printf(s"""${RESET}${GREEN}Found: """
          + chunk_idx.size
          +s""" chunks\n${RESET}It should be the same number reported by rosbag tool.\nIf you encounter any issues try reindexing your file and submit an issue.
          ${RESET}\n""")
      val fout = new FileOutputStream(pargs("file").asInstanceOf[String] + ".idx.bin")
      val builder = RosbagIdx.newBuilder
      for(i <- chunk_idx) builder.addArray(i)
      builder.build().writeTo(fout)
      fout.close()
      //printf("[%s]\n",chunk_idx.toArray.mkString(","))
    }}
  }
} 
Example 54
Source File: GeneralMaxConvolutionTests.scala    From scalismo-faces   with Apache License 2.0 5 votes vote down vote up
package scalismo.faces.image

import breeze.numerics.log
import org.scalactic.Equality
import scalismo.faces.FacesTestSuite
import scalismo.faces.image.filter.GeneralMaxConvolution
import scalismo.geometry._
import scalismo.sampling.DistributionEvaluator
import scala.language.reflectiveCalls

class GeneralMaxConvolutionTests extends FacesTestSuite {

  new Equality[Double] {
    def areEqual(a: Double, b: Any): Boolean =
      b match {
        case p: Double => a === p +- 0.0
        case _ => false
      }
  }

  class Evaluator1D(sdev: Double) extends DistributionEvaluator[Point[_1D]] {
    override def logValue(sample: Point[_1D]): Double = -math.pow(sample.x, 2)/2.0/sdev/sdev
  }

  class Evaluator2D(sdev: Double) extends DistributionEvaluator[Point[_2D]] {
    override def logValue(sample: Point[_2D]): Double = -sample.toVector.norm2/2.0/sdev/sdev
  }

  describe("general max convolution") {

    def fixture = new {
      val noise = 1
      val eval = new Evaluator1D(noise)
      val eval2d = new Evaluator2D(noise)
      val width = 5
      val height = 5
      val image = PixelImage(width, height, (x, y) => {
        (x + y).toDouble
      }).map(p => log(p / (width + height - 2) * 0.9 + 0.1))
      val row = image.row(0)
    }

    describe("in 1d") {
      it("should be calculated correctly") {
        val f = fixture
        val result = GeneralMaxConvolution.maxConvolution1D(f.row.toArray, f.eval)

        def evalForPosition(p: Int) {
          val allValues = f.row.toArray.zipWithIndex.map { case(v, index) => v + f.eval.logValue(Point1D(index - p)) }
          result(p) shouldEqual allValues.max
        }

        for (p <- 0 until f.width) {
          evalForPosition(p)
        }

      }
    }

    describe("in 2d") {
      it("should be correct using the separable version") {
        val f = fixture
        val result = GeneralMaxConvolution.separable2D(f.image, f.eval)

        def evalForPosition(p: Int, q: Int) {
          val allValues = (0 until f.width).flatMap { x =>
            (0 until f.height).map { y =>
              f.image(x, y) + f.eval2d.logValue(Point2D(x - p, y - q))
            }
          }
          result(p, q) shouldEqual allValues.max
        }

        for (p <- 0 until f.width; q <- 0 until f.height) {
          evalForPosition(p, q)
        }
      }
    }
  }
} 
Example 55
Source File: CollectionCache.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.counter.util

import java.net.InetAddress
import java.util.concurrent.TimeUnit

import com.google.common.cache.{Cache, CacheBuilder}
import org.slf4j.LoggerFactory

import scala.concurrent.{ExecutionContext, Future}
import scala.language.{postfixOps, reflectiveCalls}

case class CollectionCacheConfig(maxSize: Int, ttl: Int, negativeCache: Boolean = false, negativeTTL: Int = 600)

class CollectionCache[C <: { def nonEmpty: Boolean; def isEmpty: Boolean } ](config: CollectionCacheConfig) {
  private val cache: Cache[String, C] = CacheBuilder.newBuilder()
    .expireAfterWrite(config.ttl, TimeUnit.SECONDS)
    .maximumSize(config.maxSize)
    .build[String, C]()

//  private lazy val cache = new SynchronizedLruMap[String, (C, Int)](config.maxSize)
  private lazy val className = this.getClass.getSimpleName

  private lazy val log = LoggerFactory.getLogger(this.getClass)
  val localHostname = InetAddress.getLocalHost.getHostName

  def size = cache.size
  val maxSize = config.maxSize

  // cache statistics
  def getStatsString: String = {
    s"$localHostname ${cache.stats().toString}"
  }

  def withCache(key: String)(op: => C): C = {
    Option(cache.getIfPresent(key)) match {
      case Some(r) => r
      case None =>
        val r = op
        if (r.nonEmpty || config.negativeCache) {
          cache.put(key, r)
        }
        r
    }
  }

  def withCacheAsync(key: String)(op: => Future[C])(implicit ec: ExecutionContext): Future[C] = {
    Option(cache.getIfPresent(key)) match {
      case Some(r) => Future.successful(r)
      case None =>
        op.map { r =>
          if (r.nonEmpty || config.negativeCache) {
            cache.put(key, r)
          }
          r
        }
    }
  }

  def purgeKey(key: String) = {
    cache.invalidate(key)
  }

  def contains(key: String): Boolean = {
    Option(cache.getIfPresent(key)).nonEmpty
  }
} 
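withCache stores a result only when it is non-empty, and the structural upper bound on C is what lets it call nonEmpty on any collection-like type without a shared supertype. A self-contained sketch of that bound; the helper and the mutable map are illustrative, not part of the cache above.

import scala.collection.mutable
import scala.language.reflectiveCalls

object StructuralBoundSketch {
  // Same idea as CollectionCache's type parameter: accept anything that
  // structurally offers nonEmpty, with no common collection supertype required.
  def cacheIfNonEmpty[C <: { def nonEmpty: Boolean }](key: String, value: C,
      store: mutable.Map[String, C]): Unit =
    if (value.nonEmpty) store.update(key, value) // reflective call to nonEmpty

  def main(args: Array[String]): Unit = {
    val store = mutable.Map.empty[String, Seq[Int]]
    cacheIfNonEmpty("a", Seq(1, 2, 3), store)
    cacheIfNonEmpty("b", Seq.empty[Int], store)
    println(store.keySet) // only "a" was cached
  }
}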
Example 56
Source File: PlayJsonSupport.scala    From incubator-s2graph   with Apache License 2.0 5 votes vote down vote up
package org.apache.s2graph.http

import java.nio.charset.Charset

import akka.http.scaladsl.marshalling.{Marshaller, ToEntityMarshaller}
import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling.{FromEntityUnmarshaller, Unmarshaller}
import akka.util.ByteString
import play.api.libs.json._

trait PlayJsonSupport {

  private val mediaTypes: Seq[MediaType.WithFixedCharset] =
    Seq(MediaType.applicationWithFixedCharset("json", HttpCharsets.`UTF-8`, "js"))

  private val unmarshallerContentTypes: Seq[ContentTypeRange] = mediaTypes.map(ContentTypeRange.apply)

  implicit val playJsonMarshaller: ToEntityMarshaller[JsValue] = {
    Marshaller.oneOf(mediaTypes: _*) { mediaType =>
      Marshaller.withFixedContentType(ContentType(mediaType)) {
        json => HttpEntity(mediaType, json.toString)
      }
    }
  }

  implicit val playJsonUnmarshaller: FromEntityUnmarshaller[JsValue] = {
    Unmarshaller.byteStringUnmarshaller
      .forContentTypes(unmarshallerContentTypes: _*)
      .map {
        case ByteString.empty => throw Unmarshaller.NoContentException
        case data => Json.parse(data.decodeString(Charset.forName("UTF-8")))
      }
  }

  trait ToPlayJson[T] {
    def toJson(msg: T): JsValue
  }

  import scala.language.reflectiveCalls

  object ToPlayJson {
    type ToPlayJsonReflective = {
      def toJson: JsValue
    }

    implicit def forToJson[A <: ToPlayJsonReflective] = new ToPlayJson[A] {
      def toJson(js: A) = js.toJson
    }

    implicit def forPlayJson[A <: JsValue] = new ToPlayJson[A] {
      def toJson(js: A) = js
    }
  }

  implicit object JsErrorJsonWriter extends Writes[JsError] {
    def writes(o: JsError): JsValue = Json.obj(
      "errors" -> JsArray(
        o.errors.map {
          case (path, validationErrors) => Json.obj(
            "path" -> Json.toJson(path.toString()),
            "validationErrors" -> JsArray(validationErrors.map(validationError => Json.obj(
              "message" -> JsString(validationError.message),
              "args" -> JsArray(validationError.args.map {
                case x: Int => JsNumber(x)
                case x => JsString(x.toString)
              })
            )))
          )
        }
      )
    )
  }

} 
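The ToPlayJson instances above are keyed on a structural type: anything exposing def toJson: JsValue is accepted. The same idiom can be sketched without the Play and akka-http dependencies; ToText, Renderable and Greeting below are illustrative stand-ins, not part of the S2Graph code.

import scala.language.reflectiveCalls

object StructuralTypeclassSketch {
  trait ToText[T] { def text(value: T): String }

  // Mirrors ToPlayJson.forToJson: any A that structurally exposes `render`
  // gets an instance, and the member is invoked reflectively.
  type Renderable = { def render: String }
  implicit def forRenderable[A <: Renderable]: ToText[A] = new ToText[A] {
    def text(value: A): String = value.render
  }

  final case class Greeting(name: String) { def render: String = s"hello, $name" }

  def main(args: Array[String]): Unit =
    println(implicitly[ToText[Greeting]].text(Greeting("oleg")))
}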
Example 57
Source File: SampleApp.scala    From reliable-http-client   with Apache License 2.0 5 votes vote down vote up
package rhttpc.sample

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model._
import akka.http.scaladsl.server._
import akka.pattern._
import akka.stream.ActorMaterializer
import akka.util.Timeout
import rhttpc.akkahttp.ReliableHttpClientFactory
import rhttpc.akkapersistence.{RecoverAllActors, RecoverableActorsManager, SendMsgToChild}
import rhttpc.client.subscription.ReplyFuture

import scala.concurrent.Await
import scala.concurrent.duration._
import scala.language.reflectiveCalls

object SampleApp extends App with Directives {
  implicit val system = ActorSystem("rhttpc-sample")
  implicit val materializer = ActorMaterializer()
  import system.dispatcher

  val rhttpc = Await.result(ReliableHttpClientFactory().withOwnAmqpConnection.inOutWithSubscriptions(), 10 seconds)

  val client = new DelayedEchoClient {
    override def requestResponse(msg: String): ReplyFuture = {
      rhttpc.send(HttpRequest().withUri("http://sampleecho:8082").withMethod(HttpMethods.POST).withEntity(msg))
    }
  }

  val manager = system.actorOf(RecoverableActorsManager.props(
    FooBarActor.persistenceCategory,
    id => FooBarActor.props(id, rhttpc.subscriptionManager, client)
  ), "foobar")

  Await.result((manager ? RecoverAllActors)(Timeout(20 seconds)), 15 seconds)

  rhttpc.start()

  val route =
    path("healthcheck") {
      get {
        complete("OK")
      }
    } ~
    path(Segment) { id =>
      (post & entity(as[String])) { msg =>
        complete {
          implicit val sendMsgTimeout = Timeout(5 seconds)
          (manager ? SendMsgToChild(id, SendMsg(msg))).map(_ => "OK")
        }
      } ~
      get {
        complete {
          implicit val currentStateTimeout = Timeout(5 seconds)
          (manager ? SendMsgToChild(id, CurrentState)).mapTo[FooBarState].map(_.toString)
        }
      }
    }

  Http().bindAndHandle(route, interface = "0.0.0.0", port = 8081).map { binding =>
    Runtime.getRuntime.addShutdownHook(new Thread {
      override def run(): Unit = {
        Await.result(rhttpc.stop(), 10 seconds)
      }
    })
  }
} 
Example 58
Source File: Play27.scala    From sbt-reactive-app   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.rp.sbtreactiveapp.magic

import sbt.AutoPlugin

import scala.language.reflectiveCalls
import scala.util.Try

object Play27 {
  def playPlugin(classLoader: ClassLoader): Try[AutoPlugin] =
    withContextClassloader(classLoader) { loader =>
      getSingletonObject[AutoPlugin](loader, "play.sbt.PlayWeb$")
    }

  def version: Option[String] = {
    // The method signature equals the signature of `play.core.PlayVersion`
    type PlayVersion = {
      def current: String
    }

    withContextClassloader(this.getClass.getClassLoader) { loader =>
      getSingletonObject[PlayVersion](loader, "play.core.PlayVersion$")
        .map(_.current)
        .toOption
    }
  }
} 
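version above never references Play classes at compile time: the singleton is located by name and its current member is read through the structural PlayVersion alias. A self-contained sketch of that shape, with FakePlayVersion standing in for the reflectively loaded object.

import scala.language.reflectiveCalls
import scala.util.Try

object VersionProbeSketch {
  // Stand-in for an object that would normally be discovered via a class loader.
  object FakePlayVersion { def current: String = "2.7.3" }

  // Same shape as the structural alias used in Play27.version above.
  type HasCurrent = { def current: String }

  def readVersion(candidate: Any): Option[String] =
    Try(candidate.asInstanceOf[HasCurrent].current).toOption

  def main(args: Array[String]): Unit =
    println(readVersion(FakePlayVersion)) // Some(2.7.3)
}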
Example 59
Source File: Play26.scala    From sbt-reactive-app   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.rp.sbtreactiveapp.magic

import sbt.AutoPlugin

import scala.language.reflectiveCalls
import scala.util.Try

object Play26 {
  def playPlugin(classLoader: ClassLoader): Try[AutoPlugin] =
    withContextClassloader(classLoader) { loader =>
      getSingletonObject[AutoPlugin](loader, "play.sbt.Play$")
    }

  def version: Option[String] = {
    // The method signature equals the signature of `play.core.PlayVersion`
    type PlayVersion = {
      def current: String
    }

    withContextClassloader(this.getClass.getClassLoader) { loader =>
      getSingletonObject[PlayVersion](loader, "play.core.PlayVersion$")
        .map(_.current)
        .toOption
    }
  }
} 
Example 60
Source File: package.scala    From bigquery4s   with MIT License 5 votes vote down vote up
import scala.language.reflectiveCalls

package object bigquery4s {

  type Closable = { def close() }

  
  def using[R <: Closable, A](resource: R)(f: R => A): A = {
    try {
      f(resource)
    } finally {
      try {
        resource.close()
      } catch {
        case ignore: Exception =>
      }
    }
  }

  lazy val homeDir: String = System.getProperty("user.home")

} 
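A possible usage sketch for the using helper above, assuming the bigquery4s package object is on the classpath: any value whose close() matches the Closable alias can be loaned, and a plain java.io reader is used here purely as an illustration.

import scala.language.reflectiveCalls

import java.io.{BufferedReader, StringReader}

import bigquery4s._

object UsingSketch {
  def main(args: Array[String]): Unit = {
    val answer = using(new BufferedReader(new StringReader("42"))) { reader =>
      reader.readLine() // the reader is closed (best effort) once this block returns
    }
    println(answer) // 42
  }
}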
Example 61
Source File: ExecutionSpec.scala    From wookiee   with Apache License 2.0 5 votes vote down vote up
package com.webtrends.harness.libs.iteratee

import scala.language.reflectiveCalls

import org.specs2.mutable._
import scala.concurrent.{ ExecutionContext, Future, Await }
import scala.concurrent.duration.{ Duration, SECONDS }
import scala.util.Try

object ExecutionSpec extends Specification {
  import Execution.trampoline

  val waitTime = Duration(5, SECONDS)

  "trampoline" should {

    "execute code in the same thread" in {
      val f = Future(Thread.currentThread())(trampoline)
      Await.result(f, waitTime) must equalTo(Thread.currentThread())
    }

    "not overflow the stack" in {
      def executeRecursively(ec: ExecutionContext, times: Int) {
        if (times > 0) {
          ec.execute(new Runnable {
            def run() = executeRecursively(ec, times - 1)
          })
        }
      }

      // Work out how deep to go to cause an overflow
      val overflowingExecutionContext = new ExecutionContext {
        def execute(runnable: Runnable): Unit = {
          runnable.run()
        }
        def reportFailure(t: Throwable): Unit = t.printStackTrace()
      }

      var overflowTimes = 1 << 10
      try {
        while (overflowTimes > 0) {
          executeRecursively(overflowingExecutionContext, overflowTimes)
          overflowTimes = overflowTimes << 1
        }
        sys.error("Can't get the stack to overflow")
      } catch {
        case _: StackOverflowError => ()
      }

      // Now verify that we don't overflow
      Try(executeRecursively(trampoline, overflowTimes)) must beSuccessfulTry[Unit]
    }

    "execute code in the order it was submitted" in {
      val runRecord = scala.collection.mutable.Buffer.empty[Int]
      case class TestRunnable(id: Int, children: Runnable*) extends Runnable {
        def run() = {
          runRecord += id
          for (c <- children) trampoline.execute(c)
        }
      }

      trampoline.execute(
        TestRunnable(0,
          TestRunnable(1),
          TestRunnable(2,
            TestRunnable(4,
              TestRunnable(6),
              TestRunnable(7)),
            TestRunnable(5,
              TestRunnable(8))),
          TestRunnable(3))
      )

      runRecord must equalTo(0 to 8)
    }

  }

} 
Example 62
Source File: NonBlockingMutexSpec.scala    From wookiee   with Apache License 2.0 5 votes vote down vote up
package com.webtrends.harness.libs.concurrent

import scala.language.reflectiveCalls

import org.specs2.mutable._
import java.util.concurrent.atomic.AtomicInteger
import scala.concurrent.{ ExecutionContext, Promise, Future, Await }
import scala.concurrent.duration.{ Duration, SECONDS }

object NonBlockingMutexSpec extends Specification {

  val waitTime = Duration(2, SECONDS)

  trait Tester {
    def run(body: => Unit): Unit
  }

  class MutexTester extends Tester {
    val mutex = new NonBlockingMutex()
    def run(body: => Unit) = mutex.exclusive(body)
  }

  class NaiveTester extends Tester {
    def run(body: => Unit) = body
  }

  def countOrderingErrors(runs: Int, tester: Tester)(implicit ec: ExecutionContext): Future[Int] = {
    val result = Promise[Int]()
    val runCount = new AtomicInteger(0)
    val orderingErrors = new AtomicInteger(0)

    for (i <- 0 until runs) {
      tester.run {
        val observedRunCount = runCount.getAndIncrement()

        // If we see observedRunCount != i then this task was run out of order
        if (observedRunCount != i) {
          orderingErrors.incrementAndGet() // Record the error
        }
        // If this is the last task, complete our result promise
        if ((observedRunCount + 1) >= runs) {
          result.success(orderingErrors.get)
        }
      }
    }
    result.future
  }

  "NonBlockingMutex" should {

    "run a single operation" in {
      val p = Promise[Int]()
      val mutex = new NonBlockingMutex()
      mutex.exclusive { p.success(1) }
      Await.result(p.future, waitTime) must_== (1)
    }

    "run two operations" in {
      val p1 = Promise[Unit]()
      val p2 = Promise[Unit]()
      val mutex = new NonBlockingMutex()
      mutex.exclusive { p1.success(()) }
      mutex.exclusive { p2.success(()) }
      Await.result(p1.future, waitTime) must_== (())
      Await.result(p2.future, waitTime) must_== (())
    }

    "run code in order" in {
      import ExecutionContext.Implicits.global

      def percentageOfRunsWithOrderingErrors(runSize: Int, tester: Tester): Int = {
        val results: Seq[Future[Int]] = for (i <- 0 until 9) yield {
          countOrderingErrors(runSize, tester)
        }
        Await.result(Future.sequence(results), waitTime).filter(_ > 0).size * 10
      }

      // Iteratively increase the run size until we get observable errors 90% of the time
      // We want a high error rate because we want to then use the MutexTester
      // on the same run size and know that it is fixing up some problems. If the run size
      // is too small then the MutexTester probably isn't doing anything. We use
      // dynamic run sizing because the actual size that produces errors will vary
      // depending on the environment in which this test is run.
      var runSize = 8 // This usually reaches 8192 on my dev machine with 10 simultaneous queues
      var errorPercentage = 0
      while (errorPercentage < 90 && runSize < 1000000) {
        runSize = runSize << 1
        errorPercentage = percentageOfRunsWithOrderingErrors(runSize, new NaiveTester())
      }
      //println(s"Got $errorPercentage% ordering errors on run size of $runSize")

      // Now show that this run length works fine with the MutexTester
      percentageOfRunsWithOrderingErrors(runSize, new MutexTester()) must_== 0
    }

  }

} 
Example 63
Source File: JustinDB.scala    From JustinDB   with Apache License 2.0 5 votes vote down vote up
package justin.db

import akka.actor.ActorSystem
import akka.cluster.Cluster
import akka.cluster.http.management.ClusterHttpManagement
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.stream.{ActorMaterializer, Materializer}
import buildinfo.BuildInfo
import com.typesafe.scalalogging.StrictLogging
import justin.db.actors.{StorageNodeActor, StorageNodeActorRef}
import justin.db.client.ActorRefStorageNodeClient
import justin.db.cluster.datacenter.Datacenter
import justin.db.consistenthashing.{NodeId, Ring}
import justin.db.replica.N
import justin.db.storage.PluggableStorageProtocol
import justin.db.storage.provider.StorageProvider
import justin.httpapi.{BuildInfoRouter, HealthCheckRouter, HttpRouter}

import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Promise}
import scala.language.reflectiveCalls

// $COVERAGE-OFF$
final class JustinDB

object JustinDB extends StrictLogging {

  private[this] def validConfiguration(justinDBConfig: JustinDBConfig): Unit = {
    require(justinDBConfig.replication.N > 0, "replication N factor can't be smaller or equal 0")
    require(justinDBConfig.ring.`members-count` > 0, "members-counter can't be smaller or equal 0")
    require(justinDBConfig.ring.partitions > 0, "ring partitions can't be smaller or equal 0")
    require(justinDBConfig.ring.partitions >= justinDBConfig.ring.`members-count`, "number of ring partitions can't be smaller than number of members-count")
    require(justinDBConfig.replication.N <= justinDBConfig.ring.`members-count`, "replication N factor can't be bigger than defined members-count number")
  }

  private[this] def initStorage(justinConfig: JustinDBConfig) = {
    val provider = StorageProvider.apply(justinConfig.storage.provider)
    logger.info("Storage provider: " + provider.name)
    provider.init
  }

  def init(justinConfig: JustinDBConfig)(implicit actorSystem: ActorSystem): JustinDB = {
    validConfiguration(justinConfig)

    val processOrchestrator = Promise[JustinDB]

    implicit val executor: ExecutionContext = actorSystem.dispatcher
    implicit val materializer: Materializer = ActorMaterializer()

    val storage: PluggableStorageProtocol = initStorage(justinConfig)

    val cluster = Cluster(actorSystem)

    cluster.registerOnMemberUp {
      // STORAGE ACTOR
      val storageNodeActorRef = StorageNodeActorRef {
        val nodeId     = NodeId(justinConfig.`kubernetes-hostname`.split("-").last.toInt)
        val ring       = Ring(justinConfig.ring.`members-count`, justinConfig.ring.partitions)
        val n          = N(justinConfig.replication.N)
        val datacenter = Datacenter(justinConfig.dc.`self-data-center`)

        actorSystem.actorOf(
          props = StorageNodeActor.props(nodeId, datacenter, storage, ring, n),
          name  = StorageNodeActor.name(nodeId, datacenter)
        )
      }

      // AKKA-MANAGEMENT
      ClusterHttpManagement(cluster).start().map { _ =>
        logger.info("Cluster HTTP-Management is ready!")
      }.recover { case ex => processOrchestrator.failure(ex) }

      // HTTP API
      val routes = logRequestResult(actorSystem.name) {
        new HttpRouter(new ActorRefStorageNodeClient(storageNodeActorRef)).routes ~
          new HealthCheckRouter().routes ~
          new BuildInfoRouter().routes(BuildInfo.toJson)
      }
      Http()
        .bindAndHandle(routes, justinConfig.http.interface, justinConfig.http.port)
        .map { binding => logger.info(s"HTTP server started at ${binding.localAddress}"); processOrchestrator.trySuccess(new JustinDB) }
        .recover { case ex => logger.error("Could not start HTTP server", ex); processOrchestrator.failure(ex) }
    }

    Await.result(processOrchestrator.future, 2.minutes)
  }
}
// $COVERAGE-ON$ 
Example 64
Source File: QueryRequestBuilder.scala    From scalikejdbc-bigquery   with Apache License 2.0 5 votes vote down vote up
package scalikejdbc

import java.time.ZoneId

import com.google.cloud.bigquery.{QueryJobConfiguration, QueryParameterValue}
import scalikejdbc.bigquery.{BqParameter, BqPreparedStatement, Format}

import scala.collection.JavaConverters._
import scala.language.reflectiveCalls

object QueryRequestBuilder {

  private val LocalDateEpoch = java.time.LocalDate.ofEpochDay(0)

  
  def apply(statement: SQLSyntax): QueryJobConfiguration.Builder = {

    val builder = QueryJobConfiguration.newBuilder(statement.value)
    val ps = new BqPreparedStatement

    // almost same implementation as scalikejdbc.StatementExecutor
    statement.rawParameters.zipWithIndex.foreach { case (param, index) =>
      param match {
        case binder: ParameterBinder =>
          binder(ps, index)
        case p: BigDecimal => ps.setBigDecimal(index, p.bigDecimal)
        case p: BigInt => ps.setBigDecimal(index, new java.math.BigDecimal(p.bigInteger))
        case p: Boolean => ps.setBoolean(index, p)
        case p: Byte => ps.setByte(index, p)
        case p: java.sql.Date => ps.setDate(index, p)
        case p: Double => ps.setDouble(index, p)
        case p: Float => ps.setFloat(index, p)
        case p: Int => ps.setInt(index, p)
        case p: Long => ps.setLong(index, p)
        case p: Short => ps.setShort(index, p)
        case p: String => ps.setString(index, p)
        case p: java.sql.Time => ps.setTime(index, p)
        case p: java.sql.Timestamp => ps.setTimestamp(index, p)
        case p: java.util.Date => ps.setTimestamp(index, p.toSqlTimestamp)
        case p: java.time.ZonedDateTime => ps.setTimestamp(index, java.sql.Timestamp.from(p.toInstant))
        case p: java.time.OffsetDateTime => ps.setTimestamp(index, java.sql.Timestamp.from(p.toInstant))
        case p: java.time.Instant => ps.setTimestamp(index, java.sql.Timestamp.from(p))
        case p: java.time.LocalDateTime =>
          ps.setTimestamp(index, java.sql.Timestamp.valueOf(p))
        case p: java.time.LocalDate =>
          ps.setDate(index, java.sql.Date.valueOf(p))
        case p: java.time.LocalTime =>
          val millis = p.atDate(LocalDateEpoch).atZone(java.time.ZoneId.systemDefault).toInstant.toEpochMilli
          val time = new java.sql.Time(millis)
          ps.setTime(index, time)
        case p =>
          param.getClass.getCanonicalName match {
            case "org.joda.time.DateTime" =>
              val t = p.asInstanceOf[ {def toDate: java.util.Date}].toDate.toSqlTimestamp
              ps.setTimestamp(index, t)
            case "org.joda.time.LocalDateTime" =>
              val t = p.asInstanceOf[ {def toDate: java.util.Date}].toDate.toSqlTimestamp
              ps.setTimestamp(index, t)
            case "org.joda.time.LocalDate" =>
              val t = p.asInstanceOf[ {def toDate: java.util.Date}].toDate.toSqlDate
              ps.setDate(index, t)
            case "org.joda.time.LocalTime" =>
              val millis = p.asInstanceOf[ {def toDateTimeToday: {def getMillis: Long}}].toDateTimeToday.getMillis
              ps.setTime(index, new java.sql.Time(millis))
            case _ =>
              throw new UnsupportedOperationException(
                s"unsupported parameter type. index: ${index}, parameter : ${param}, class: ${param.getClass}")
          }
      }
    }

    val parameters = ps.parameters.toList
      .sortBy { case (parameterIndex, _) => parameterIndex }
      .map { case (_, parameter) =>
        parameter match {
          case BqParameter.Int64(value) =>
            QueryParameterValue.int64(value)
          case BqParameter.Float64(value) =>
            QueryParameterValue.float64(value)
          case BqParameter.Bool(value) =>
            QueryParameterValue.bool(value)
          case BqParameter.String(value) =>
            QueryParameterValue.string(value)
          case BqParameter.Bytes(value) =>
            QueryParameterValue.bytes(value)
          case BqParameter.Date(value) =>
            QueryParameterValue.date(value.format(Format.date))
          case BqParameter.DateTime(value) =>
            QueryParameterValue.dateTime(value.format(Format.dateTime))
          case BqParameter.Time(value) =>
            QueryParameterValue.time(value.format(Format.time))
          case BqParameter.Timestamp(value) =>
            QueryParameterValue.timestamp(value.withZoneSameInstant(ZoneId.of("UTC")).format(Format.timestamp))
        }
      }.asJava

    builder.setPositionalParameters(parameters)
  }
} 
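The fallback branch above keeps joda-time off the compile-time classpath by matching on the class name and then casting to structural types. A minimal sketch of that trick, with ThirdPartyDate standing in for org.joda.time.LocalDate.

import scala.language.reflectiveCalls

object JodaLikeCastSketch {
  // Stand-in for a third-party date type we only want to know about at runtime.
  class ThirdPartyDate { def toDate: java.util.Date = new java.util.Date(0L) }

  def toSqlDate(param: Any): java.sql.Date = {
    // Same shape as the casts in QueryRequestBuilder above.
    val utilDate = param.asInstanceOf[{ def toDate: java.util.Date }].toDate
    new java.sql.Date(utilDate.getTime)
  }

  def main(args: Array[String]): Unit =
    println(toSqlDate(new ThirdPartyDate)) // 1970-01-01
}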
Example 65
Source File: NameLookupSpec.scala    From ScalaStan   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.cibo.scalastan

import org.scalatest.{FunSpec, Matchers}
import scala.language.reflectiveCalls

object StanObjectTop extends StanModel {
  val x = parameter(real())
}

class NameLookupSpec extends FunSpec with Matchers {

  object StanObjectInClass extends StanModel {
    val y = parameter(real())
  }

  describe("NameLookup") {
    it("finds names at the top level") {
      StanObjectTop.x.name shouldBe "x"
    }

    it("finds names in an inner class") {
      StanObjectInClass.y.name shouldBe "y"
    }

    it("finds names in an anonymous class") {
      val anon = new StanModel {
        val z = parameter(real())
      }
      anon.z.name shouldBe "z"
    }

    it("finds names in an object in an anonymous class") {
      class T
      val anon = new T {
        object Test extends StanModel {
          val t = parameter(real())
        }
      }
      anon.Test.t.name shouldBe "t"
    }

    it("finds names in functions") {
      new StanModel {
        def func(): Unit = {
          val a = parameter(real())
          a.name shouldBe "a"
        }
        func()
      }
    }
  }
} 
Example 66
Source File: TestConnector.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.connector.test

import java.util.ArrayList
import java.util.concurrent.LinkedBlockingQueue

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.collection.JavaConverters._

import org.apache.kafka.clients.producer.RecordMetadata
import org.apache.kafka.common.TopicPartition
import common.StreamLogging

import org.apache.openwhisk.common.Counter
import org.apache.openwhisk.core.connector.Message
import org.apache.openwhisk.core.connector.MessageConsumer
import org.apache.openwhisk.core.connector.MessageProducer

class TestConnector(topic: String, override val maxPeek: Int, allowMoreThanMax: Boolean)
    extends MessageConsumer
    with StreamLogging {

  override def peek(duration: FiniteDuration, retry: Int = 0) = {
    val msgs = new ArrayList[Message]
    queue.synchronized {
      queue.drainTo(msgs, if (allowMoreThanMax) Int.MaxValue else maxPeek)
      msgs.asScala map { m =>
        offset += 1
        (topic, -1, offset, m.serialize.getBytes)
      }
    }
  }

  override def commit(retry: Int = 0) = {
    if (throwCommitException) {
      throw new Exception("commit failed")
    } else {
      // nothing to do
    }
  }

  def occupancy = queue.size

  def send(msg: Message): Future[RecordMetadata] = {
    producer.send(topic, msg)
  }

  def send(msgs: Seq[Message]): Future[RecordMetadata] = {
    import scala.language.reflectiveCalls
    producer.sendBulk(topic, msgs)
  }

  def close() = {
    closed = true
    producer.close()
  }

  private val producer = new MessageProducer {
    def send(topic: String, msg: Message, retry: Int = 0): Future[RecordMetadata] = {
      queue.synchronized {
        if (queue.offer(msg)) {
          logging.info(this, s"put: $msg")
          Future.successful(new RecordMetadata(new TopicPartition(topic, 0), 0, queue.size, -1, Long.box(-1L), -1, -1))
        } else {
          logging.error(this, s"put failed: $msg")
          Future.failed(new IllegalStateException("failed to write msg"))
        }
      }
    }

    def sendBulk(topic: String, msgs: Seq[Message]): Future[RecordMetadata] = {
      queue.synchronized {
        if (queue.addAll(msgs.asJava)) {
          logging.info(this, s"put: ${msgs.length} messages")
          Future.successful(new RecordMetadata(new TopicPartition(topic, 0), 0, queue.size, -1, Long.box(-1L), -1, -1))
        } else {
          logging.error(this, s"put failed: ${msgs.length} messages")
          Future.failed(new IllegalStateException("failed to write msg"))
        }
      }
    }

    def close() = {}
    def sentCount() = counter.next()
    val counter = new Counter()
  }

  var throwCommitException = false
  private val queue = new LinkedBlockingQueue[Message]()
  @volatile private var closed = false
  private var offset = -1L
} 
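
In the example above, producer is initialised with an anonymous subclass of MessageProducer that adds sendBulk and sentCount. Scala infers a refinement type for the val, so calling sendBulk from send(msgs: Seq[Message]) is a structural (reflective) call, which is why that method carries its own import of scala.language.reflectiveCalls. A minimal sketch of the same situation, with illustrative names:

import scala.language.reflectiveCalls

object AnonymousMemberSketch extends App {
  trait Producer { def send(msg: String): Unit }

  // The inferred type of producer is Producer{def sendBulk(msgs: Seq[String]): Unit}.
  val producer = new Producer {
    def send(msg: String): Unit = println(s"send: $msg")
    def sendBulk(msgs: Seq[String]): Unit = msgs.foreach(send) // extra member, not on the trait
  }

  producer.sendBulk(Seq("a", "b")) // reflective call: sendBulk exists only on the refinement
}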
Example 67
Source File: depgraph.scala    From sbt-blockade   with Apache License 2.0 5 votes vote down vote up
//: ----------------------------------------------------------------------------
//: Copyright 2015 Johannes Rudolph
//:
//: Distributed under the Apache 2.0 License, please see the NOTICE
//: file in the root of the project for further details.
//: ----------------------------------------------------------------------------
package verizon.build

object depgraph {

  import java.io.File
  import sbt._
  import scala.collection.mutable.{HashMap, MultiMap, Set}
  import scala.language.reflectiveCalls

  object SbtUpdateReport {

    type OrganizationArtifactReport = {
      def modules: Seq[ModuleReport]
    }

    def fromConfigurationReport(report: ConfigurationReport, rootInfo: sbt.ModuleID): ModuleGraph = {
      implicit def id(sbtId: sbt.ModuleID): ModuleId = ModuleId(sbtId.organization, sbtId.name, sbtId.revision)

      def moduleEdges(orgArt: OrganizationArtifactReport): Seq[(Module, Seq[Edge])] = {
        val chosenVersion = orgArt.modules.find(!_.evicted).map(_.module.revision)
        orgArt.modules.map(moduleEdge(chosenVersion))
      }

      def moduleEdge(chosenVersion: Option[String])(report: ModuleReport): (Module, Seq[Edge]) = {
        val evictedByVersion = if (report.evicted) chosenVersion else None
        val jarFile = report.artifacts.find(_._1.`type` == "jar").orElse(report.artifacts.find(_._1.extension == "jar")).map(_._2)
        (Module(
          id = report.module,
          license = report.licenses.headOption.map(_._1),
          evictedByVersion = evictedByVersion,
          jarFile = jarFile,
          error = report.problem
        ), report.callers.map(caller ⇒ Edge(caller.caller, report.module)))
      }

      val (nodes, edges) = report.details.flatMap(moduleEdges).unzip
      val root = Module(rootInfo)

      ModuleGraph(root +: nodes, edges.flatten)
    }
  }

  type Edge = (ModuleId, ModuleId)

  def Edge(from: ModuleId, to: ModuleId): Edge = from -> to

  case class ModuleId(organisation: String,
                      name: String,
                      version: String) {
    def idString: String = organisation + ":" + name + ":" + version
  }

  case class Module(id: ModuleId,
                    license: Option[String] = None,
                    extraInfo: String = "",
                    evictedByVersion: Option[String] = None,
                    jarFile: Option[File] = None,
                    error: Option[String] = None) {
    def hadError: Boolean = error.isDefined

    def isUsed: Boolean = !isEvicted

    def isEvicted: Boolean = evictedByVersion.isDefined
  }

  case class ModuleGraph(nodes: Seq[Module], edges: Seq[Edge]) {
    lazy val modules: Map[ModuleId, Module] =
      nodes.map(n ⇒ (n.id, n)).toMap

    def module(id: ModuleId): Module = modules(id)

    lazy val dependencyMap: Map[ModuleId, Seq[Module]] =
      createMap(identity)

    lazy val reverseDependencyMap: Map[ModuleId, Seq[Module]] =
      createMap { case (a, b) ⇒ (b, a) }

    def createMap(bindingFor: ((ModuleId, ModuleId)) ⇒ (ModuleId, ModuleId)): Map[ModuleId, Seq[Module]] = {
      val m = new HashMap[ModuleId, Set[Module]] with MultiMap[ModuleId, Module]
      edges.foreach { entry ⇒
        val (f, t) = bindingFor(entry)
        m.addBinding(f, module(t))
      }
      m.toMap.mapValues(_.toSeq.sortBy(_.id.idString)).withDefaultValue(Nil)
    }

    def roots: Seq[Module] =
      nodes.filter(n ⇒ !edges.exists(_._2 == n.id)).sortBy(_.id.idString)

    def isEmpty: Boolean = nodes.isEmpty
  }

} 
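
OrganizationArtifactReport above is a type alias for a structural type, so moduleEdges only requires that whatever report it receives has a modules member; the call orgArt.modules is resolved reflectively, which is the reason for the reflectiveCalls import. A small, self-contained sketch of the same idea with assumed names:

import scala.language.reflectiveCalls

object StructuralAliasSketch extends App {
  type HasModules = { def modules: Seq[String] }

  case class ReportA(modules: Seq[String])
  class ReportB { def modules: Seq[String] = Seq("b1", "b2") }

  // Works for both report shapes, because conformance is structural.
  def moduleCount(r: HasModules): Int = r.modules.size // reflective call

  println(moduleCount(ReportA(Seq("a1"))))
  println(moduleCount(new ReportB))
}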
Example 68
Source File: TypeClasses.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch04

object TypeClasses {

  object OO {
    trait Cable {
      def connect(): Boolean
    }
    case class Usb(orientation: Boolean) extends Cable {
      override def connect(): Boolean = orientation
    }
    case class Lightning(length: Int) extends Cable {
      override def connect(): Boolean = length > 100
    }
    case class UsbC(kind: String) extends Cable {
      override def connect(): Boolean = kind.contains("USB 3.1")
    }
    def connectCable(c: Cable): Boolean = c.connect()
  }

  OO.connectCable(OO.Usb(false))
  OO.connectCable(OO.Lightning(150))


  object TC {

    case class Usb(orientation: Boolean)
    case class Lightning(length: Int)
    case class UsbC[Kind](kind: Kind)

    @scala.annotation.implicitNotFound("Cannot connect cable of type ${C}")
    trait Cable[C] {
      def connect(c: C): Boolean
    }
    implicit val UsbCable: Cable[Usb] = new Cable[Usb] {
      override def connect(c: Usb): Boolean = c.orientation
    }
    implicit val LightningCable: Cable[Lightning] = (_: Lightning).length > 100

    // compile error
    // implicit val UsbCCable: Cable[UsbC] = (c: UsbC) => c.kind.contains("USB 3.1")

    implicit val UsbCCableString: Cable[UsbC[String]] = (_: UsbC[String]).kind.contains("USB 3.1")

    def connectCable[C : Cable](c: C): Boolean = implicitly[Cable[C]].connect(c)

    import scala.language.reflectiveCalls

    implicit def usbCCableDelegate[T](implicit conn: T => Boolean): Cable[UsbC[T]] =
      (c: UsbC[T]) => conn(c.kind)

    implicit val symbolConnect: Symbol => Boolean = (_: Symbol).name.toLowerCase.contains("cable")

    implicit def adapt[A: Cable, B: Cable]: Cable[(A, B)] =
      (ab: (A, B)) => implicitly[Cable[A]].connect(ab._1) && implicitly[Cable[B]].connect(ab._2)

  }

  import ch04.TypeClasses.TC._
  connectCable(Usb(false))
  connectCable(Lightning(150))
  connectCable(UsbC("USB 3.1"))
  connectCable(UsbC('NonameCable))
  connectCable(UsbC('FakeKable))



  val usb2usbC = (Usb(false), UsbC('NonameCable))
  connectCable(usb2usbC)
  val lightning2usbC = (Lightning(150), UsbC('NonameCable))
  connectCable(lightning2usbC)

  val usbC2usb2lightning2usbC = ((UsbC('NonameCable), Usb(false)), (Lightning(150), UsbC("USB 3.1")))
  connectCable(usbC2usb2lightning2usbC)

  val noUsbC_Long_Cable = (UsbC('NonameCable), (Lightning(150), UsbC(10L)))
  // connectCable(noUsbC_Long_Cable) // fails to compile

} 
Example 69
Source File: Functor.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch08

import scala.language.{higherKinds, reflectiveCalls}
import scala.util.Try

trait Functor[F[_]] {
  def map[A,B](in: F[A])(f: A => B): F[B]

  def mapC[A,B](f: A => B): F[A] => F[B] = fa => map(fa)(f)
}

object Functor {
  implicit val bucketFunctor: Functor[List] = new Functor[List] {
    override def map[A, B](in: List[A])(f: A => B): List[B] = in.map(f)

    override def mapC[A, B](f: A => B): List[A] => List[B] = (_: List[A]).map(f)
  }

  implicit val optionFunctor: Functor[Option] = new Functor[Option] {
    override def map[A, B](in: Option[A])(f: A => B): Option[B] = in.map(f)
    override def mapC[A, B](f: A => B): Option[A] => Option[B] = (_: Option[A]).map(f)
  }

  implicit def eitherFunctor[L] = new Functor[({ type T[A] = Either[L, A] })#T] {
    override def map[A, B](in: Either[L, A])(f: A => B): Either[L, B] = in.map(f)
    override def mapC[A, B](f: A => B): Either[L, A] => Either[L, B] = (_: Either[L, A]).map(f)
  }

  implicit val tryFunctor: Functor[Try] = new Functor[Try] {
    override def map[A, B](in: Try[A])(f: A => B): Try[B] = in.map(f)
    override def mapC[A, B](f: A => B): Try[A] => Try[B] = (_: Try[A]).map(f)
  }
} 
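
A brief usage sketch of the instances above (not part of the book's sources): summoning two of the functors explicitly and mapping with them.

import scala.util.Try
import ch08.Functor

object FunctorUsageSketch extends App {
  println(Functor.optionFunctor.map(Option(21))(_ * 2)) // Some(42)
  println(Functor.tryFunctor.map(Try(41))(_ + 1))       // Success(42)
}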
Example 70
Source File: Applicative.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch08

import scala.language.{higherKinds, reflectiveCalls}
import scala.util.{Failure, Success, Try}

trait Applicative[F[_]] extends Functor[F] {
  def apply[A,B](a: F[A])(f: F[A => B]): F[B]
  def unit[A](a: => A): F[A]

  override def map[A,B](fa: F[A])(f: A => B): F[B] =
    apply(fa)(unit(f))

  def map2[A,B,C](fa: F[A], fb: F[B])(f: (A, B) => C): F[C] =
    apply(fb)(map(fa)(f.curried))

  def map3[A,B,C,D](fa: F[A],
                    fb: F[B],
                    fc: F[C])(f: (A, B, C) => D): F[D] =
    apply(fc)(apply(fb)(apply(fa)(unit(f.curried))))

  def map4[A,B,C,D,E](fa: F[A],
                      fb: F[B],
                      fc: F[C],
                      fd: F[D])(f: (A, B, C, D) => E): F[E] = {
    val ff: (A, B, C) => D => E  = (a,b,c) => d => f(a,b,c,d)
    apply(fd)(map3(fa, fb, fc)(ff))
  }

  def product[G[_]](G: Applicative[G]): Applicative[({type f[x] = (F[x], G[x])})#f] = {
    val F = this
    new Applicative[({type f[x] = (F[x], G[x])})#f] {
      def unit[A](a: => A) = (F.unit(a), G.unit(a))
      override def apply[A,B](p: (F[A], G[A]))(fs: (F[A => B], G[A => B])) =
        (F.apply(p._1)(fs._1), G.apply(p._2)(fs._2))
    }
  }

  def compose[G[_]](G: Applicative[G]): Applicative[({type f[x] = F[G[x]]})#f] = {
    val F = this

    def fab[A, B]: G[A => B] => G[A] => G[B] = (gf: G[A => B]) => (ga: G[A]) => G.apply(ga)(gf)

    def fg[B, A](f: F[G[A => B]]): F[G[A] => G[B]] = F.map(f)(fab)

    new Applicative[({type f[x] = F[G[x]]})#f] {
      def unit[A](a: => A) = F.unit(G.unit(a))
      override def apply[A, B](a: F[G[A]])(f: F[G[A => B]]): F[G[B]] =
        F.apply(a)(fg(f))
    }
  }
}



object Applicative {
  implicit val bucketApplicative: Applicative[List] = new Applicative[List] {

    override def apply[A, B](a: List[A])(f: List[A => B]): List[B] = (a, f) match {
      case (Nil, _) => Nil
      case (_, Nil) => Nil
      case (aa :: as, ff :: fs) =>
        val fab: (A => B) => B = f => f(aa)
        ff(aa) :: as.map(ff) ::: fs.map(fab) ::: apply(as)(fs)
      case other => Nil
    }

    override def unit[A](a: => A): List[A] = List(a)
  }

  implicit val optionApplicative: Applicative[Option] = new Applicative[Option] {
    override def apply[A, B](a: Option[A])(f: Option[A => B]): Option[B] = (a,f) match {
      case (Some(a), Some(f)) => Some(f(a))
      case _ => None
    }
    override def unit[A](a: => A): Option[A] = Some(a)
  }

  implicit def eitherApplicative[L] = new Applicative[({ type T[A] = Either[L, A] })#T] {
    override def apply[A, B](a: Either[L, A])(f: Either[L, A => B]): Either[L, B] = (a, f) match {
      case (Right(a), Right(f)) => Right(f(a))
      case (Left(l), _) => Left(l)
      case (_, Left(l)) => Left(l)
    }
    override def unit[A](a: => A): Either[L, A] = Right(a)
  }

  implicit val tryApplicative: Applicative[Try] = new Applicative[Try] {
    override def apply[A, B](a: Try[A])(f: Try[A => B]): Try[B] = (a, f) match {
      case (Success(a), Success(f)) => Try(f(a))
      case (Failure(ex), _) => Failure(ex)
      case (_, Failure(ex)) => Failure(ex)
    }
    override def unit[A](a: => A): Try[A] = Success(a)
  }

} 
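
A brief usage sketch of the Option instance above (not part of the book's sources), showing how map2 and map3 combine independent values:

import ch08.Applicative

object ApplicativeUsageSketch extends App {
  val optionA = Applicative.optionApplicative
  println(optionA.map2(Option(2), Option(3))(_ + _))                // Some(5)
  println(optionA.map3(Option(1), Option(2), Option(3))(_ + _ + _)) // Some(6)
  println(optionA.map2(Option(2), Option.empty[Int])(_ + _))        // None
}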
Example 71
Source File: Traversable.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch08

import ch08.Model.Bucket

import scala.language.{higherKinds, reflectiveCalls}
import scala.util.{Failure, Success, Try}
import scala.{Traversable => _}

trait Traversable[F[_]] extends Functor[F] {
  def traverse[A,B,G[_]: Applicative](a: F[A])(f: A => G[B]): G[F[B]]
  def sequence[A,G[_]: Applicative](a: F[G[A]]): G[F[A]] = traverse(a)(identity)

  implicit def compose[H[_]](implicit H: Traversable[H]): Traversable[({type f[x] = F[H[x]]})#f] = {
    val F = this
    new Traversable[({type f[x] = F[H[x]]})#f] {
      override def traverse[A, B, G[_] : Applicative](fa: F[H[A]])(f: A => G[B]) =
        F.traverse(fa)((ga: H[A]) => H.traverse(ga)(f))

      override def map[A, B](in: F[H[A]])(f: A => B): F[H[B]] =
        F.map(in)((ga: H[A]) => H.map(ga)(f))
    }
  }
}

object Traversable {

  implicit val bucketTraversable = new Traversable[Bucket] {
    override def map[A, B](in: Bucket[A])(f: A => B): Bucket[B] = Functor.bucketFunctor.map(in)(f)
    override def traverse[A, B, G[_] : Applicative](a: Bucket[A])(f: A => G[B]): G[Bucket[B]] = {
      val G = implicitly[Applicative[G]]
      a.foldRight(G.unit(List[B]()))((aa, fbs) => G.map2(f(aa), fbs)(_ :: _))
    }
  }

  implicit val optionTraversable = new Traversable[Option] {
    override def map[A, B](in: Option[A])(f: A => B): Option[B] = Functor.optionFunctor.map(in)(f)
    override def traverse[A, B, G[_] : Applicative](a: Option[A])(f: A => G[B]): G[Option[B]] = {
      val G = implicitly[Applicative[G]]
      a match {
        case Some(s) => G.map(f(s))(Some.apply)
        case None => G.unit(None)
      }
    }
  }

  implicit val tryTraversable = new Traversable[Try] {
    override def map[A, B](in: Try[A])(f: A => B): Try[B] = Functor.tryFunctor.map(in)(f)
    override def traverse[A, B, G[_] : Applicative](a: Try[A])(f: A => G[B]): G[Try[B]] = {
      val G = implicitly[Applicative[G]]
      a match {
        case Success(s) => G.map(f(s))(Success.apply)
        case Failure(ex) => G.unit(Failure(ex)) // re-wrap the ex to change the type of Failure
      }
    }
  }

  implicit def eitherTraversable[L] = new Traversable[({ type T[A] = Either[L, A] })#T] {
    override def map[A, B](in: Either[L, A])(f: A => B): Either[L, B] = Functor.eitherFunctor[L].map(in)(f)
    override def traverse[A, B, G[_] : Applicative](a: Either[L, A])(f: A => G[B]): G[Either[L, B]] = {
      val G = implicitly[Applicative[G]]
      a match {
        case Right(s) => G.map(f(s))(Right.apply)
        case Left(l) => G.unit(Left(l)) // re-wrap the l so the Either picks up the new right-hand type

      }
    }
  }

} 
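
A brief usage sketch of the Option instance above (not part of the book's sources): traversing with a Try-producing function picks up the Applicative[Try] instance from the Applicative companion object.

import scala.util.Try
import ch08.Traversable

object TraversableUsageSketch extends App {
  val result = Traversable.optionTraversable.traverse(Option(21))(i => Try(i * 2))
  println(result) // Success(Some(42))
}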
Example 72
Source File: Assessment.scala    From Learn-Scala-Programming   with MIT License 5 votes vote down vote up
package ch07

import scala.language.higherKinds
import scala.language.reflectiveCalls

import scala.util._

object Assessment {
  implicit val booleanOr: Monoid[Boolean] = new Monoid[Boolean] {
    override def identity: Boolean = false
    override def op(l: Boolean, r: Boolean): Boolean = l || r
  }

  implicit val booleanAnd: Monoid[Boolean] = new Monoid[Boolean] {
    override def identity: Boolean = true
    override def op(l: Boolean, r: Boolean): Boolean = l && r
  }

  implicit def option[A : Monoid]: Monoid[Option[A]] = new Monoid[Option[A]] {
    override def identity: Option[A] = None
    override def op(l: Option[A], r: Option[A]): Option[A] = (l, r) match {
      case (Some(la), Some(lb)) => Option(implicitly[Monoid[A]].op(la, lb))
      case _ => l orElse r
    }
  }

  def either[L, R : Monoid]: Monoid[Either[L, R]] = new Monoid[Either[L, R]] {
    private val ma = implicitly[Monoid[R]]
    override def identity: Either[L, R] = Right(ma.identity)
    override def op(l: Either[L, R], r: Either[L, R]): Either[L, R] = (l, r) match {
      case (l @ Left(_), _) => l
      case (_, l @ Left(_)) => l
      case (Right(la), Right(lb)) => Right(ma.op(la, lb))
    }
  }

} 
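
A brief usage sketch of the monoids above (not part of the book's sources), assuming the chapter's Monoid trait with identity and op as used in the instances. booleanOr is passed explicitly because two Monoid[Boolean] instances live in implicit scope:

import ch07.Assessment

object MonoidUsageSketch extends App {
  val orOfOptions = Assessment.option[Boolean](Assessment.booleanOr)
  println(orOfOptions.op(Some(false), Some(true))) // Some(true)
  println(orOfOptions.op(None, Some(true)))        // Some(true)
  println(orOfOptions.identity)                    // None
}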
Example 73
Source File: macroTools.scala    From angulate2   with MIT License 5 votes vote down vote up
//     Project: angulate2 (https://github.com/jokade/angulate2)
// Description: Common utility functions for angulate2 macros

// Copyright (c) 2016 Johannes.Kastner <[email protected]>
//               Distributed under the MIT License (see included LICENSE file)
package angulate2.internal

import de.surfice.smacrotools.{JsBlackboxMacroTools, JsWhiteboxMacroTools}

import scala.language.reflectiveCalls

trait AngulateCommonMacroTools extends de.surfice.smacrotools.JsCommonMacroTools {
  import c.universe._

  private val ignoreAnnotations = Seq("debug")

  
  def translateAngulateAnnotations(modifiers: Modifiers): List[Tree] = modifiers.annotations collect {
    case annot @ q"new $name(..$params)" if !ignoreAnnotations.contains(name.toString) =>
      q"${TermName(name.toString)}.apply(..$params)"
  }


  // TODO: simpler :)
  def getInjectionDependencies(params: Iterable[Tree]): Iterable[Dependency] =
    if(params.isEmpty) None
    else
      params map {
        case q"$mods val $name: $tpe = $e" =>
          val t = c.typecheck(tpe,c.TYPEmode).tpe
          t.typeSymbol.annotations.map(_.tree).collectFirst{
            case q"new $name( ..$params )" if name.toString == "scala.scalajs.js.annotation.JSImport" => params match {
              case Seq(Literal(Constant(module)),Literal(Constant(name))) => RequireDependency(module.toString,name.toString)
            }
          }.getOrElse(ScalaDependency(t.toString))
      }


}

trait AngulateWhiteboxMacroTools extends JsWhiteboxMacroTools with AngulateCommonMacroTools
trait AngulateBlackboxMacroTools extends JsBlackboxMacroTools with AngulateCommonMacroTools

sealed trait Dependency
case class ScalaDependency(fqn: String) extends Dependency
case class RequireDependency(module: String, name: String) extends Dependency 
Example 74
Source File: MarshallerTestSupport.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.matchers.drivers

import com.wix.e2e.http.api.Marshaller

import scala.collection.concurrent.TrieMap
import scala.language.reflectiveCalls

trait MarshallerTestSupport {
  val marshaller = new Marshaller {
    val unmarshallResult = TrieMap.empty[String, AnyRef]
    val unmarshallError = TrieMap.empty[String, Throwable]

    def unmarshall[T: Manifest](jsonStr: String) = {
      unmarshallError.get(jsonStr).foreach( throw _ )
      unmarshallResult.getOrElse(jsonStr, throw new UnsupportedOperationException)
                      .asInstanceOf[T]
    }

    def marshall[T](t: T) = ???
  }

  def givenUnmarshallerWith[T <: AnyRef](someEntity: T, forContent: String)(implicit mn: Manifest[T]): Unit =
    marshaller.unmarshallResult.put(forContent, someEntity)

  def givenBadlyBehavingUnmarshallerFor[T : Manifest](withContent: String): Unit =
    marshaller.unmarshallError.put(withContent, new RuntimeException)
}

trait CustomMarshallerProvider {
  def marshaller: Marshaller
  implicit def customMarshaller: Marshaller = marshaller
} 
Example 75
Source File: MacroGlossaryTest.scala    From rule-engine   with MIT License 5 votes vote down vote up
package nl.rabobank.oss.rules.utils

import nl.rabobank.oss.rules.dsl.core.glossaries.Glossary
import nl.rabobank.oss.rules.facts.{ListFact, SingularFact}
import org.scalatest.{FlatSpec, Matchers}

import scala.language.reflectiveCalls

class MacroGlossaryTest extends FlatSpec with Matchers {

  it should "work with macros to define facts" in {

    val firstDescription = "First fact"
    val secondDescription = "Second fact"
    val thirdDescription = "Third fact"
    val fourthDescription = "Fourth fact"


    val g = new Glossary {
      val factA = defineFact[String](firstDescription)
      val factB = defineFact[String](secondDescription)
      val factC = defineListFact[String](thirdDescription)
      val factD = defineListFact[String](fourthDescription)
    }

    g.factA.name should be("factA")
    g.factB.name should be("factB")
    g.factC.name should be("factC")
    g.factD.name should be("factD")

    g.factA.isInstanceOf[SingularFact[String]] should be(true)
    g.factB.isInstanceOf[SingularFact[String]] should be(true)
    g.factC.isInstanceOf[ListFact[String]] should be(true)
    g.factD.isInstanceOf[ListFact[String]] should be(true)

    g.facts.size should be(4)

    g.facts.get("factA").get should be(g.factA)
    g.facts.get("factB").get should be(g.factB)
    g.facts.get("factC").get should be(g.factC)
    g.facts.get("factD").get should be(g.factD)

    g.facts.get("factA").get.description should be(firstDescription)
    g.facts.get("factB").get.description should be(secondDescription)
    g.facts.get("factC").get.description should be(thirdDescription)
    g.facts.get("factD").get.description should be(fourthDescription)
  }

  it should "store concrete value type in the Fact on creation" in {
    val g = new Glossary {
      val stringFact = defineFact[String]
      val intFact = defineFact[Int]
      val intListFact = defineListFact[Int]
    }

    g.stringFact.valueType should be("String")
    g.intFact.valueType should be("Int")
    g.intListFact.valueType should be("Int")
  }

} 
Example 76
Source File: XORShiftRandomSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util.random

import scala.language.reflectiveCalls

import org.apache.commons.math3.stat.inference.ChiSquareTest
import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils.times

class XORShiftRandomSuite extends SparkFunSuite with Matchers {

  private def fixture = new {
    val seed = 1L
    val xorRand = new XORShiftRandom(seed)
    val hundMil = 1e8.toInt
  }

  
  test("XORShift generates statistically uniform values") {
    // Minimal reconstruction of the abridged setup in this listing: fill a rows-by-bins
    // count matrix from the fixture's generator, then check with a chi-square test that
    // the bin counts look uniform across rows (the test should not reject at 0.05).
    val f = fixture
    val numRows = 5
    val numBins = 10
    val bins = Array.ofDim[Long](numRows, numBins)
    for (r <- 0 until numRows) {
      times(f.hundMil) { bins(r)(math.abs(f.xorRand.nextInt) % numBins) += 1 }
    }

    val chiTest = new ChiSquareTest
    assert(chiTest.chiSquareTest(bins, 0.05) === false)
  }

  test ("XORShift with zero seed") {
    val random = new XORShiftRandom(0L)
    assert(random.nextInt() != 0)
  }

  test ("hashSeed has random bits throughout") {
    val totalBitCount = (0 until 10).map { seed =>
      val hashed = XORShiftRandom.hashSeed(seed)
      val bitCount = java.lang.Long.bitCount(hashed)
      // make sure we have roughly equal numbers of 0s and 1s.  Mostly just check that we
      // don't have all 0s or 1s in the high bits
      bitCount should be > 20
      bitCount should be < 44
      bitCount
    }.sum
    // and over all the seeds, very close to equal numbers of 0s & 1s
    totalBitCount should be > (32 * 10 - 30)
    totalBitCount should be < (32 * 10 + 30)
  }
} 
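
The fixture method above returns an instance of an anonymous class, so its type is a structural refinement and reads such as fixture.xorRand or fixture.hundMil go through reflection; that is what the reflectiveCalls import covers in this suite. A minimal sketch of the same fixture pattern with illustrative members:

import scala.language.reflectiveCalls

object FixtureSketch extends App {
  def fixture = new {
    val seed = 1L
    val rng = new scala.util.Random(seed)
  }

  val f = fixture
  println(f.seed)          // reflective read of a refinement member
  println(f.rng.nextInt())
}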
Example 77
Source File: DiskBlockManagerSuite.scala    From sparkoscope   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 78
Source File: Closer.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.wmexchanger.utils

import scala.language.reflectiveCalls
import scala.util.control.NonFatal

object Closer {

  protected type Closeable = {def close() : Unit}

  def close[Resource <: Closeable](resource: => Resource): Unit = resource.close()

  // This is so that exceptions caused during close are caught, but don't
  // prevent the registration of any previous exception.
  // See also https://medium.com/@dkomanov/scala-try-with-resources-735baad0fd7d.
  // Others have resource: => Closeable, but I want the resource evaluated beforehand
  // so that it doesn't throw an exception before there is anything to close.
  def autoClose[Resource <: Closeable, Result](resource: Resource)(function: Resource => Result): Result = {

    val (result: Option[Result], exception: Option[Throwable]) = try {
      (Some(function(resource)), None)
    }
    catch {
      case exception: Throwable => (None, Some(exception))
    }

    val closeException: Option[Throwable] = Option(resource).flatMap { resource =>
      try {
        resource.close()
        None
      }
      catch {
        case exception: Throwable => Some(exception)
      }
    }

    (exception, closeException) match {
      case (None, None) => result.get
      case (Some(ex), None) => throw ex
      case (None, Some(ex)) => throw ex
      case (Some(ex), Some(closeEx)) => (ex, closeEx) match {
        case (e, NonFatal(nonfatal)) =>
          // Put the potentially fatal one first.
          e.addSuppressed(nonfatal)
          throw e
        case (NonFatal(nonfatal), e) =>
          // Put the potentially fatal one first.
          e.addSuppressed(nonfatal)
          throw e
        case (e, closeE) =>
          // On tie, put exception before closeException.
          e.addSuppressed(closeE)
          throw e
      }
    }
  }

  // Allow for alternative syntax closeable.autoClose { closeable => ... }
  implicit class AutoCloser[Resource <: Closer.Closeable](resource: Resource) {

    def autoClose[Result](function: Resource => Result): Result = Closer.autoClose(resource)(function)
  }
} 
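
A short usage sketch of the Closer utility above (the Session class is illustrative): because the Closeable bound is structural, any value exposing close(): Unit qualifies, whether or not it implements java.io.Closeable, and close() runs even if the body throws.

import org.clulab.wm.wmexchanger.utils.Closer.AutoCloser

object AutoCloseSketch extends App {
  class Session { def close(): Unit = println("session closed") }

  val answer = new Session().autoClose { _ => 42 } // prints "session closed", returns 42
  println(answer)
}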
Example 79
Source File: Closer.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.eidos.utils

import scala.language.reflectiveCalls
import scala.util.control.NonFatal

object Closer {

  protected type Closeable = {def close() : Unit}

  def close[Resource <: Closeable](resource: => Resource): Unit = resource.close()

  // This is so that exceptions caused during close are caught, but don't
  // prevent the registration of any previous exception.
  // See also https://medium.com/@dkomanov/scala-try-with-resources-735baad0fd7d.
  // Others have resource: => Closeable, but I want the resource evaluated beforehand
  // so that it doesn't throw an exception before there is anything to close.
  def autoClose[Resource <: Closeable, Result](resource: Resource)(function: Resource => Result): Result = {

    val (result: Option[Result], exception: Option[Throwable]) = try {
      (Some(function(resource)), None)
    }
    catch {
      case exception: Throwable => (None, Some(exception))
    }

    val closeException: Option[Throwable] = Option(resource).flatMap { resource =>
      try {
        resource.close()
        None
      }
      catch {
        case exception: Throwable => Some(exception)
      }
    }

    (exception, closeException) match {
      case (None, None) => result.get
      case (Some(ex), None) => throw ex
      case (None, Some(ex)) => throw ex
      case (Some(ex), Some(closeEx)) => (ex, closeEx) match {
        case (e, NonFatal(nonfatal)) =>
          // Put the potentially fatal one first.
          e.addSuppressed(nonfatal)
          throw e
        case (NonFatal(nonfatal), e) =>
          // Put the potentially fatal one first.
          e.addSuppressed(nonfatal)
          throw e
        case (e, closeE) =>
          // On tie, put exception before closeException.
          e.addSuppressed(closeE)
          throw e
      }
    }
  }

  // Allow for alternative syntax closeable.autoClose { closeable => ... }
  implicit class AutoCloser[Resource <: Closer.Closeable](resource: Resource) {

    def autoClose[Result](function: Resource => Result): Result = Closer.autoClose(resource)(function)
  }
} 
Example 80
Source File: Closer.scala    From eidos   with Apache License 2.0 5 votes vote down vote up
package org.clulab.wm.elasticsearch.utils

import scala.language.reflectiveCalls
import scala.util.control.NonFatal

object Closer {

  protected type Closeable = {def close() : Unit}

  def close[Resource <: Closeable](resource: => Resource): Unit = resource.close()

  // This is so that exceptions caused during close are caught, but don't
  // prevent the registration of any previous exception.
  // See also https://medium.com/@dkomanov/scala-try-with-resources-735baad0fd7d.
  // Others have resource: => Closeable, but I want the resource evaluated beforehand
  // so that it doesn't throw an exception before there is anything to close.
  def autoClose[Resource <: Closeable, Result](resource: Resource)(function: Resource => Result): Result = {

    val (result: Option[Result], exception: Option[Throwable]) = try {
      (Some(function(resource)), None)
    }
    catch {
      case exception: Throwable => (None, Some(exception))
    }

    val closeException: Option[Throwable] = Option(resource).flatMap { resource =>
      try {
        resource.close()
        None
      }
      catch {
        case exception: Throwable => Some(exception)
      }
    }

    (exception, closeException) match {
      case (None, None) => result.get
      case (Some(ex), None) => throw ex
      case (None, Some(ex)) => throw ex
      case (Some(ex), Some(closeEx)) => (ex, closeEx) match {
        case (e, NonFatal(nonfatal)) =>
          // Put the potentially fatal one first.
          e.addSuppressed(nonfatal)
          throw e
        case (NonFatal(nonfatal), e) =>
          // Put the potentially fatal one first.
          e.addSuppressed(nonfatal)
          throw e
        case (e, closeE) =>
          // On tie, put exception before closeException.
          e.addSuppressed(closeE)
          throw e
      }
    }
  }

  // Allow for alternative syntax closeable.autoClose { closeable => ... }
  implicit class AutoCloser[Resource <: Closer.Closeable](resource: Resource) {

    def autoClose[Result](function: Resource => Result): Result = Closer.autoClose(resource)(function)
  }
} 
Example 81
Source File: ScurutoValidationRules.scala    From scuruto   with MIT License 5 votes vote down vote up
package validator

import skinny.validator.ValidationRule

import scala.language.reflectiveCalls

object validChar extends ValidationRule {
  def name = "validChar"
  override def isValid(v: Any): Boolean = isEmpty(v) || !v.toString.exists(_.isSurrogate)
}

object validChars extends ValidationRule {
  def name = "validChar"
  override def isValid(s: Any): Boolean = {
    s match {
      case seq: Seq[Any] =>
        seq.forall { v =>
          isEmpty(v) || !v.toString.exists(_.isSurrogate)
        }
      case _ =>
        false
    }
  }
}

case class maxLengths(max: Int) extends ValidationRule {
  def name = "maxLength"
  override def messageParams = Seq(max.toString)
  def isValid(s: Any): Boolean = {
    s match {
      case seq: Seq[Any] =>
        seq.forall { v =>
          isEmpty(v) || {
            toHasSize(v).map(x => x.size <= max)
              .getOrElse(v.toString.length <= max)
          }
        }
      case _ =>
        false
    }
  }
} 
Example 82
Source File: HashShuffleManagerSuite.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.shuffle.hash

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.FunSuite

import org.apache.spark.{SparkEnv, SparkContext, LocalSparkContext, SparkConf}
import org.apache.spark.executor.ShuffleWriteMetrics
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.shuffle.FileShuffleBlockManager
import org.apache.spark.storage.{ShuffleBlockId, FileSegment}

class HashShuffleManagerSuite extends FunSuite with LocalSparkContext {
  private val testConf = new SparkConf(false)

  private def checkSegments(expected: FileSegment, buffer: ManagedBuffer) {
    assert(buffer.isInstanceOf[FileSegmentManagedBuffer])
    val segment = buffer.asInstanceOf[FileSegmentManagedBuffer]
    assert(expected.file.getCanonicalPath === segment.getFile.getCanonicalPath)
    assert(expected.offset === segment.getOffset)
    assert(expected.length === segment.getLength)
  }

  test("consolidated shuffle can write to shuffle group without messing existing offsets/lengths") {

    val conf = new SparkConf(false)
    // reset after EACH object write. This is to ensure that there are bytes appended after
    // an object is written. So if the codepaths assume writeObject is end of data, this should
    // flush those bugs out. This was common bug in ExternalAppendOnlyMap, etc.
    conf.set("spark.serializer.objectStreamReset", "1")
    conf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
    conf.set("spark.shuffle.manager", "org.apache.spark.shuffle.hash.HashShuffleManager")

    sc = new SparkContext("local", "test", conf)

    val shuffleBlockManager =
      SparkEnv.get.shuffleManager.shuffleBlockManager.asInstanceOf[FileShuffleBlockManager]

    val shuffle1 = shuffleBlockManager.forMapTask(1, 1, 1, new JavaSerializer(conf),
      new ShuffleWriteMetrics)
    for (writer <- shuffle1.writers) {
      writer.write("test1")
      writer.write("test2")
    }
    for (writer <- shuffle1.writers) {
      writer.commitAndClose()
    }

    val shuffle1Segment = shuffle1.writers(0).fileSegment()
    shuffle1.releaseWriters(success = true)

    val shuffle2 = shuffleBlockManager.forMapTask(1, 2, 1, new JavaSerializer(conf),
      new ShuffleWriteMetrics)

    for (writer <- shuffle2.writers) {
      writer.write("test3")
      writer.write("test4")
    }
    for (writer <- shuffle2.writers) {
      writer.commitAndClose()
    }
    val shuffle2Segment = shuffle2.writers(0).fileSegment()
    shuffle2.releaseWriters(success = true)

    // Now comes the test :
    // Write to shuffle 3; and close it, but before registering it, check if the file lengths for
    // previous task (for shuffle1) is the same as 'segments'. Earlier, we were inferring length
    // of block based on remaining data in file : which could mess things up when there is concurrent read
    // and writes happening to the same shuffle group.

    val shuffle3 = shuffleBlockManager.forMapTask(1, 3, 1, new JavaSerializer(testConf),
      new ShuffleWriteMetrics)
    for (writer <- shuffle3.writers) {
      writer.write("test3")
      writer.write("test4")
    }
    for (writer <- shuffle3.writers) {
      writer.commitAndClose()
    }
    // check before we register.
    checkSegments(shuffle2Segment, shuffleBlockManager.getBlockData(ShuffleBlockId(1, 2, 0)))
    shuffle3.releaseWriters(success = true)
    checkSegments(shuffle2Segment, shuffleBlockManager.getBlockData(ShuffleBlockId(1, 2, 0)))
    shuffleBlockManager.removeShuffle(1)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 83
Source File: DiskBlockManagerSuite.scala    From SparkCore   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach, FunSuite}

import org.apache.spark.SparkConf
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends FunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 84
Source File: ZkWatcher.scala    From Adenium   with Apache License 2.0 5 votes vote down vote up
package com.adenium.externals.zookeeper

import java.nio.charset.StandardCharsets

import com.adenium.externals.zookeeper.ZkUtil.setPersistent
import com.adenium.utils.Logger
import com.adenium.utils.May._
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.Watcher.Event.EventType
import org.apache.zookeeper.data.Stat
import org.apache.zookeeper.{KeeperException, WatchedEvent, Watcher}

import scala.language.reflectiveCalls



object ZkWatcher {

  def onZkChange(cur: CuratorFramework, path: String)(handler: (String, Stat) => Unit) {

    Logger.logInfo("[  watchNodeOrChidlrenChange ] == zknode : " + path)

    def watcher = new Watcher {
      def process(event: WatchedEvent) {
        Logger.logDebug("[ watchNodeOrChidlrenChange ] == callback invoked " + path + "\ttype: " + event.getType)
        event.getType match {
          case EventType.NodeDataChanged | EventType.NodeChildrenChanged => updated()
          case _ => reset()
        }
      }
    }

    def updated() {
      try {
        val stat = new Stat()
        val msg = cur.getData.storingStatIn(stat).forPath(path)

        setPersistent(cur, path, "")

        val str = new String(msg, StandardCharsets.UTF_8)

        if (str.nonEmpty) {
          state("[ Watching ] == arrived msg: " + new String(msg, StandardCharsets.UTF_8))
          handler(str, stat)
        }

        if (str.startsWith("stop zkctrl")) {
          Logger.logWarning("[ Watching ] == stopped by 'stop zkctrl' message : path =" + path)
        } else {
          /// create and attach next msg watcher
          cur.checkExists.usingWatcher(watcher).forPath(path)
        }

      } catch {
        case e: KeeperException =>
          Logger.logWarning("[ watchNodeOrChidlrenChange ] == read node: " + path + "\te: " + e)
          reset()
      }
    }

    def reset() {
      setPersistent(cur, path, "")
      updated()
    }

    reset()
  }

} 
Example 85
Source File: ComposeFreeMonads.scala    From Freasy-Monad   with MIT License 5 votes vote down vote up
package examples.scalaz

import scalaz._
import scalaz.Id.Id
import freasymonad.scalaz.free
import scala.collection.mutable.ListBuffer
import scala.io.StdIn
import scala.language.{higherKinds, reflectiveCalls}

// example based off https://github.com/typelevel/cats/blob/master/docs/src/main/tut/datatypes/freemonad.md#composing-free-monads-adts
object ComposeFreeMonads extends App {

  @free trait Interact {
    type InteractF[A] = Free[Adt, A]
    sealed trait Adt[A]
    def ask(prompt: String): InteractF[String]
    def tell(msg: String): InteractF[Unit]
  }

  @free trait DataSource {
    type DataSourceF[A] = Free[Adt, A]
    sealed trait Adt[A]
    def addCat(a: String): DataSourceF[Unit]
    def getAllCats: DataSourceF[List[String]]
    def addAndGetAllCats(a: String): DataSourceF[List[String]] =
     for {
       _ <- addCat(a)
       c <- getAllCats
     } yield c
  }

  type ScalazApp[A] = Coproduct[DataSource.Adt, Interact.Adt, A]

  // program1 and program2 are the same.
  // This library lets you choose which style you like.

  def program1(implicit I: Interact.Injects[ScalazApp], D : DataSource.Injects[ScalazApp]): Free[ScalazApp, Unit] = {
    import I._, D._
    for {
      cat  <- ask("What's the kitty's name?")
      cats <- addAndGetAllCats(cat)
      _    <- tell(cats.toString)
    } yield ()
  }

  val program2: Free[ScalazApp, Unit] = {
    import Interact.injectOps._, DataSource.injectOps._
    for {
      cat  <- ask[ScalazApp]("What's the kitty's name?")
      cats <- addAndGetAllCats[ScalazApp](cat)
      _    <- tell[ScalazApp](cats.toString)
    } yield ()
  }

  val consoleCats = new Interact.Interp[Id] {
    def ask(prompt: String): Id[String] = {
      println(prompt)
      StdIn.readLine()
    }
    def tell(msg: String): Id[Unit] = println(msg)
  }

  val inMemoryDatasource = new DataSource.Interp[Id] {
    private[this] val memDataSet = new ListBuffer[String]
    def addCat(a: String): Id[Unit] = memDataSet.append(a)
    def getAllCats: Id[List[String]] = memDataSet.toList
  }

  // scalaz lacks a convenient `or` atm
  // https://github.com/scalaz/scalaz/issues/1222
  implicit class NaturalTransformationOps[F[_], G[_]](val self: F ~> G) extends AnyVal {
    def or[H[_]](g: H ~> G): ({type λ[α] = Coproduct[F, H, α]})#λ ~> G =
      new (({type λ[α] = Coproduct[F, H, α]})#λ ~> G) {
        def apply[A](fa: Coproduct[F, H, A]): G[A] = fa.run.fold(self.apply, g.apply)
      }
  }

  val interpreter = inMemoryDatasource or consoleCats

  program1.foldMap(interpreter)
  program2.foldMap(interpreter)
} 
Example 86
Source File: TravisScalaJs.scala    From sbt-best-practice   with Apache License 2.0 5 votes vote down vote up
package com.thoughtworks.sbtBestPractice.travis

import sbt._
import org.scalajs.sbtplugin.ScalaJSPlugin
import scala.language.reflectiveCalls


object TravisScalaJs extends AutoPlugin {
  private def reflectiveLinkerSetting[
      StandardConfig <: {
        def withBatchMode(batchMode: Boolean): StandardConfig
      }
  ](key: SettingKey[StandardConfig]): Def.Setting[StandardConfig] = {
    key := {
      key.value.withBatchMode(Travis.travisRepoSlug.?.value.isDefined)
    }
  }

  private def scalaJSLinkerConfig06[
      StandardConfig <: {
        def withBatchMode(batchMode: Boolean): StandardConfig
      }
  ] = {
    // The type of ScalaJSPlugin v0.6
    type ScalaJSPlugin06 = {
      def autoImport: {
        def scalaJSLinkerConfig: SettingKey[StandardConfig]
      }
    }
    ScalaJSPlugin.asInstanceOf[ScalaJSPlugin06].autoImport.scalaJSLinkerConfig
  }

  override def projectSettings: Seq[Def.Setting[_]] = {
    Seq(try {
      // sbt-scalajs 1.x
      reflectiveLinkerSetting(ScalaJSPlugin.autoImport.scalaJSLinkerConfig)
    } catch {
      case _: NoClassDefFoundError =>
        // sbt-scalajs 0.6.x
        reflectiveLinkerSetting(scalaJSLinkerConfig06)
    })
  }

  override def requires = {
    try {
      ScalaJSPlugin && Travis
    } catch {
      case _: NoClassDefFoundError =>
        Travis
    }
  }

  override def trigger = {
    if (requires == Travis) {
      noTrigger
    } else {
      allRequirements
    }
  }
} 
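
The plugin above never links against a specific sbt-scalajs version: reflectiveLinkerSetting only demands, through an F-bounded structural type, that the config object has withBatchMode, so one code path serves both the 0.6 and 1.x config classes. A self-contained sketch of that technique with illustrative config types:

import scala.language.reflectiveCalls

object BatchModeSketch extends App {
  case class ConfigV1(batch: Boolean) { def withBatchMode(b: Boolean): ConfigV1 = copy(batch = b) }
  case class ConfigV2(batch: Boolean) { def withBatchMode(b: Boolean): ConfigV2 = copy(batch = b) }

  // Any config type exposing withBatchMode can be passed; the call is reflective.
  def enableBatch[C <: { def withBatchMode(b: Boolean): C }](config: C): C =
    config.withBatchMode(true)

  println(enableBatch(ConfigV1(batch = false))) // ConfigV1(true)
  println(enableBatch(ConfigV2(batch = false))) // ConfigV2(true)
}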
Example 87
Source File: JComponentExt.scala    From intellij-lsp   with Apache License 2.0 5 votes vote down vote up
package org.jetbrains.plugins.scala.extensions

import java.awt.event.{ActionEvent, ActionListener}
import javax.swing.JComponent

import scala.language.reflectiveCalls


object JComponentExt {
  private type WithAddActionListener = JComponent {
    def addActionListener(al: ActionListener)
  }

  //todo: replace with SAM after migration to scala 2.12
  implicit class ActionListenersOwner(val jc: WithAddActionListener) extends AnyVal {
    def addActionListenerEx(body: => Unit): Unit = jc.addActionListener(new ActionListener {
      override def actionPerformed(e: ActionEvent): Unit = body
    })
  }
} 
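
A short usage sketch of the extension above (assuming the object is on the classpath): javax.swing.JButton is a JComponent that declares addActionListener, so it satisfies the structural type and picks up addActionListenerEx; the reflective call itself happens inside the implicit class.

import javax.swing.JButton
import org.jetbrains.plugins.scala.extensions.JComponentExt.ActionListenersOwner

object ActionListenerSketch extends App {
  val button = new JButton("Run")
  button.addActionListenerEx(println("button clicked")) // body runs on every action event
}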
Example 88
Source File: CsvDf.scala    From scala-course   with GNU General Public License v3.0 5 votes vote down vote up
object CsvDf {

  def main(args: Array[String]): Unit = {

    import org.saddle.Index
    import org.saddle.io._

    val file = CsvFile("../r/cars93.csv")
    val df = CsvParser.parse(file).withColIndex(0)
    println(df)
    val df2 = df.rfilter(_("EngineSize").mapValues(CsvParser.parseDouble).at(0)<=4.0)
    println(df2)
    val wkg=df2.col("Weight").mapValues(CsvParser.parseDouble).mapValues(_*0.453592).setColIndex(Index("WeightKG"))
    val df3=df2.joinPreserveColIx(wkg.mapValues(_.toString))
    println(df3)

    import CsvImplicits._
    import scala.language.reflectiveCalls
    df3.writeCsvFile("saddle-out.csv")

  }

} 
Example 89
Source File: UpdateEclipseClasspaths.scala    From incubator-daffodil   with Apache License 2.0 5 votes vote down vote up
import scala.xml._
import java.io.PrintStream

import scala.language.reflectiveCalls


    val fixedCpNode = XML.loadString(pp.format(cpNode))
    val cpes = (fixedCpNode \\ "classpathentry")
    val newEntries = cpes :+
      <classpathentry combineaccessrules="false" kind="src" path="/daffodil-macro-lib"/>
    val newCP =
      <classpath>
        <!-- This file is updated by the UpdateEclipseClasspath app. -->
        { newEntries }
      </classpath>
    writeXMLFile(newCP, cpf.toString)
  }

  def writeXML(xml: Node, out: { def print(s: String): Unit } = System.out): Unit = {
    val formattedSpec = pp.format(xml)
    out.print("<?xml version='1.0' encoding='UTF-8'?>\n")
    out.print("\n")
    out.print(formattedSpec)
    out.print("\n")
  }

  def writeXMLFile(xml: Node, outputFilename: String): Unit = {
    val f = new java.io.File(outputFilename)
    f.getParentFile().mkdirs()
    val ps = new PrintStream(f)
    writeXML(xml, ps)
    ps.close()
  }
} 
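
writeXML above takes its sink as the structural type { def print(s: String): Unit }, so both a PrintStream and any ad-hoc object with a print method can be passed. A minimal, self-contained sketch of that parameter style (names are illustrative):

import scala.language.reflectiveCalls

object PrintSinkSketch extends App {
  def emit(s: String, out: { def print(s: String): Unit } = System.out): Unit = out.print(s) // reflective call

  emit("to stdout\n")

  val buf = new StringBuilder
  emit("to a buffer", new { def print(s: String): Unit = buf.append(s) })
  println(buf)
}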
Example 90
Source File: Implicits.scala    From incubator-daffodil   with Apache License 2.0 5 votes vote down vote up
package org.apache.daffodil

import java.io.{ ByteArrayInputStream, BufferedInputStream }

import org.apache.daffodil.xml.NS
import org.apache.daffodil.exceptions.Assert
import scala.language.reflectiveCalls
import scala.language.implicitConversions
import scala.language.{ implicitConversions, reflectiveCalls } // silences scala 2.10 warnings

object Implicits {

  object ImplicitsSuppressUnusedImportWarning {
    def apply() = if (scala.math.random.isNaN()) Assert.impossible()
  }

  
  def intercept[T <: AnyRef](body: => Any)(implicit tag: scala.reflect.ClassTag[T]): T = {
    val clazz = tag.runtimeClass.asInstanceOf[Class[T]]
    val caught = try {
      body
      None
    } catch {
      case npe: NullPointerException => throw npe
      case s: scala.util.control.ControlThrowable => throw s
      case u: Throwable => {
        if (!clazz.isAssignableFrom(u.getClass)) {
          throw new InterceptFailedException(
            "Failed to intercept expected exception. Expected '%s' but got '%s'.".format(clazz.getName, u.getClass.getName))
        } else {
          Some(u)
        }
      }
    }
    caught match {
      case None => throw new InterceptFailedException("Failed to intercept any exceptions.")
      case Some(e) => e.asInstanceOf[T]
    }
  }

  class InterceptFailedException(msg: String) extends RuntimeException(msg)

} 
Example 91
Source File: PrivacyPolicyModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import client.LGMain.{ Loc }
import scala.util.{ Failure, Success }
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.{ DashBoardCSS }
import scala.language.reflectiveCalls
import org.querki.jquery._

object PrivacyPolicyModal {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback)

  case class State()

  case class Backend(t: BackendScope[Props, State]) {
    def hide = Callback {
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }
    def hideModal = {
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }
    def mounted(props: Props): Callback = Callback {
    }
    def submitForm(e: ReactEventI) = {
      e.preventDefault()
    }
    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }

    def render(s: State, p: Props) = {
      val headerText = "Privacy Policy"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          // this is called after the modal has been hidden (animation is completed)
          closed = () => formClosed(s, p)
        ),
        //  <.form(^.onSubmit ==> submitForm)(
        <.div(^.className := "row", DashBoardCSS.Style.MarginLeftchkproduct)(
          <.div(DashBoardCSS.Style.marginTop10px)(),
          <.div()(
            "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum."
          )
        ),
        <.div()(
          <.div(DashBoardCSS.Style.modalHeaderPadding, ^.className := "text-right")( //              <.button(^.tpe := "button",^.className:="btn btn-default", DashBoardCSS.Style.marginLeftCloseBtn, ^.onClick --> hide,"Post"),
          //              <.button(^.tpe := "button",^.className:="btn btn-default", DashBoardCSS.Style.marginLeftCloseBtn, ^.onClick --> hide,"Cancel")
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop10px, DashBoardCSS.Style.marginLeftRight)()
      // )
      )
    }
  }
  private val component = ReactComponentB[Props]("PrivacyPolicy")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build
  def apply(props: Props) = component(props)
} 
Example 92
Source File: ConfirmAccountCreation.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.DashBoardCSS
import client.sessionitems.SessionItems
import shared.models.EmailValidationModel

import scala.language.reflectiveCalls
import scalacss.ScalaCssReact._
import org.querki.jquery._
import diode.AnyAction._
import org.querki.jquery._
import org.scalajs.dom
import org.scalajs.dom._

object ConfirmAccountCreation {
  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: (EmailValidationModel, Boolean) => Callback)

  case class State(emailValidationModel: EmailValidationModel, accountValidationFailed: Boolean = false,
                   hostName: String = s"https://${dom.window.location.hostname}:9876")

  class Backend(t: BackendScope[Props, State]) {
    def submitForm(e: ReactEventI) = {
      e.preventDefault()
      window.sessionStorage.setItem(SessionItems.ApiDetails.API_URL, t.state.runNow().hostName)
      // mark it as NOT cancelled (which is the default)
      t.modState(s => s.copy(accountValidationFailed = true))
    }


    def updateIp(e: ReactEventI) = {
      val value = e.target.value
      //      println(s"value:$value")
      t.modState(s => s.copy(hostName = value))
    }

    def hide = {
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }

    def updateToken(e: ReactEventI) = {
      // update TodoItem content
      val value = e.target.value
      t.modState(s => s.copy(emailValidationModel = s.emailValidationModel.copy(token = value)))
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler(state.emailValidationModel, state.accountValidationFailed)
    }

    def render(s: State, p: Props) = {
      // log.debug(s"User is ${if (s.item.id == "") "adding" else "editing"} a todo")
      val headerText = "Confirm Account Creation"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          closed = () => formClosed(s, p)
        ),
        <.form(^.onSubmit ==> submitForm)(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div(DashBoardCSS.Style.scltInputModalContainerMargin)(
                // <.div(DashBoardCSS.Style.modalHeaderFont)("Confirm Account Creation"),
                <.h5("After registration, you were emailed a confirmation code. Please enter the code below"),
                <.div(^.className := "form-group")(
                  <.input(^.tpe := "text", bss.formControl, DashBoardCSS.Style.inputModalMargin, ^.id := "Name",
                    ^.placeholder := "username", ^.value := s.hostName, ^.onChange ==> updateIp, ^.required := true)),
                <.input(^.tpe := "text", bss.formControl, DashBoardCSS.Style.inputModalMargin, DashBoardCSS.Style.marginTop10px,
                  ^.id := "Name", ^.placeholder := "Enter validation code", ^.value := s.emailValidationModel.token, ^.onChange ==> updateToken),
                <.button(^.tpe := "submit", ^.className := "btn", DashBoardCSS.Style.btnWidth, DashBoardCSS.Style.btnBackground, "Confirm")
              ),
              <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
            )
          )
        )
      )
    }
  }

  private val component = ReactComponentB[Props]("ConfirmAccountCreation")
    .initialState_P(p => State(new EmailValidationModel("")))
    .renderBackend[Backend]
    .componentDidUpdate(scope => Callback {
      if (scope.currentState.accountValidationFailed) {
        scope.$.backend.hide
      }
    })
    .build

  def apply(props: Props) = component(props)
} 
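
The modal components shown here all follow the same shape: an immutable State case class that is only ever updated through copy, and a submitHandler callback in Props that reports the final state back to the parent once the dialog closes. Below is a minimal, framework-free sketch of that flow; ModalFlowSketch and its member names are illustrative only and are not part of ProductWebUI.

object ModalFlowSketch {
  // illustrative stand-in for the component's State case class
  final case class ModalState(token: String = "", accountValidationFailed: Boolean = false)

  // the parent supplies a callback, playing the role of Props.submitHandler
  def run(submitHandler: (ModalState, Boolean) => Unit): Unit = {
    var state = ModalState()                              // initial state
    state = state.copy(token = "123456")                  // updateToken
    state = state.copy(accountValidationFailed = true)    // submitForm flips the flag
    submitHandler(state, state.accountValidationFailed)   // formClosed reports back
  }

  def main(args: Array[String]): Unit =
    run((s, failed) => println(s"closed with token=${s.token}, flag=$failed"))
}
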
Example 93
Source File: EndUserAgreement.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import shared.models.{ EmailValidationModel, UserModel }

import client.LGMain.{ Loc }
import scala.util.{ Failure, Success }
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.{ HeaderCSS, DashBoardCSS, ProjectCSS, MessagesCSS }
import scala.language.reflectiveCalls
import org.querki.jquery._

object EndUserAgreement {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback)
  case class State()
  case class Backend(t: BackendScope[Props, State])  {
    def hide = Callback {
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }
    def mounted(props: Props): Callback = Callback {
    }

    def submitForm(e: ReactEventI) = {
      e.preventDefault()
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }

    def render(s: State, p: Props) = {
      val headerText = "End User Agreement"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          // this is called after the modal has been hidden (animation is completed)
          closed = () => formClosed(s, p)
        ),
        <.div(^.className := "row")( // <.div(^.className:="col-md-12 col-sm-12")(<.div(DashBoardCSS.Style.modalHeaderFont,MessagesCSS.Style.paddingLeftModalHeaderbtn)("End User Agreement"))
        ), //main row
        <.div(^.className := "row", DashBoardCSS.Style.MarginLeftchkproduct)(
          <.div(DashBoardCSS.Style.marginTop10px)(),
          <.div()(
            "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum."
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop10px, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("EndUserAgreement")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build
  def apply(props: Props) = component(props)
} 
Example 94
Source File: TermsOfServices.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import client.LGMain.{ Loc }
import scala.util.{ Failure, Success }
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.{ DashBoardCSS }
import scala.language.reflectiveCalls
import org.querki.jquery._

object TermsOfServices {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback)
  case class State()

  case class Backend(t: BackendScope[Props, State]) {
    def hide = Callback {
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }
    def mounted(props: Props): Callback = Callback {
    }

    def submitForm(e: ReactEventI) = {
      e.preventDefault()
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }

    def render(s: State, p: Props) = {
      val headerText = "Terms of Service"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          // this is called after the modal has been hidden (animation is completed)
          closed = () => formClosed(s, p)
        ),
        <.div(^.className := "row")( // <.div(^.className:="col-md-12 col-sm-12")(<.div(DashBoardCSS.Style.modalHeaderFont,MessagesCSS.Style.paddingLeftModalHeaderbtn)("Terms of Service"))
        ), //main row
        <.div(^.className := "row", DashBoardCSS.Style.MarginLeftchkproduct)(
          <.div(DashBoardCSS.Style.marginTop10px)(),
          <.div()(
            "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum."
          )
        ),
        <.div()(
          <.div(DashBoardCSS.Style.modalHeaderPadding, ^.className := "text-right")(
            <.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, DashBoardCSS.Style.marginLeftCloseBtn, ^.onClick --> hide, "Back")
          //              <.button(^.tpe := "button",^.className:="btn btn-default", DashBoardCSS.Style.marginLeftCloseBtn, ^.onClick --> hide,"Cancel")
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop10px, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("TermsofService")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build
  def apply(props: Props) = component(props)
} 
Example 95
Source File: LegalModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import shared.models.{ EmailValidationModel, UserModel }
import client.LGMain.{ Loc }
import org.scalajs.dom._
import scala.util.{ Failure, Success }
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.reflectiveCalls
import org.querki.jquery._

object LegalModal { //TodoForm
  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: (Boolean, Boolean, Boolean, Boolean, Boolean, Boolean) => Callback)
  case class State(legal: Boolean = false, showPrivacyPolicyModal: Boolean = false,
    showTermsOfServicesForm: Boolean = false, showEndUserAgreementModal: Boolean = false, showTrademarksModal: Boolean = false, showCopyrightModal: Boolean = false)

  class Backend(t: BackendScope[Props, State]) {
    def submitForm(e: ReactEventI) = {
      e.preventDefault()
      t.modState(s => s.copy(legal = true))
    }

    def hide = {
      console.log("hide")
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }

    def showPrivacyPolicy(e: ReactEventI) = {
      console.log("in showPrivacyPolicy ")
      t.modState(s => s.copy(showPrivacyPolicyModal = true))
    }
    def showTrademarks(e: ReactEventI) = {
      console.log("in tradeMarks ")
      t.modState(s => s.copy(showTrademarksModal = true))
    }
    def showCopyright(e: ReactEventI) = {
      console.log("in tradeMarks ")
      t.modState(s => s.copy(showCopyrightModal = true))
    }
    def showEndUserAgreement(e: ReactEventI) = {
      t.modState(s => s.copy(showEndUserAgreementModal = true))
    }
    def showTermsOfServices(e: ReactEventI) = {
      t.modState(s => s.copy(showTermsOfServicesForm = true))
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      //println("form closed")
//      println("state.showTrademarksModal : " + state.showTrademarksModal)
      props.submitHandler(state.legal, state.showPrivacyPolicyModal, state.showTermsOfServicesForm, state.showEndUserAgreementModal, state.showTrademarksModal, state.showCopyrightModal)
    }
    def render(s: State, p: Props) = {
      // log.debug(s"User is ${if (s.item.id == "") "adding" else "editing"} a todo")
      val headerText = "Legal"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          closed = () => formClosed(s, p)
        ),
        <.form(^.onSubmit ==> submitForm)(
          <.div(^.className := "row", DashBoardCSS.Style.MarginLeftchkproduct)(
            <.ul()(
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Privacy Policy", ^.onClick ==> showPrivacyPolicy)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "End User Agreement", ^.onClick ==> showEndUserAgreement)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn-link",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Terms of Service", ^.onClick ==> showTermsOfServices)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Trademarks and Credits", ^.onClick ==> showTrademarks)),
              <.li()(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault, FooterCSS.Style.legalModalBtn, "Copyright", ^.onClick ==> showCopyright))
            )
          ),
          <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
        ) //submitform
      )

    }
  }
  private val component = ReactComponentB[Props]("LegalModal")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .componentDidUpdate(scope => Callback {
      if (scope.currentState.legal || scope.currentState.showPrivacyPolicyModal || scope.currentState.showTermsOfServicesForm || scope.currentState.showEndUserAgreementModal
        || scope.currentState.showTrademarksModal || scope.currentState.showCopyrightModal) {
        scope.$.backend.hide
      }
    })
    .build
  def apply(props: Props) = component(props)
} 
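
LegalModal coordinates its six follow-up dialogs through boolean flags: clicking a list item flips one flag, componentDidUpdate hides the modal as soon as any flag is set, and formClosed hands all six flags to the parent, which then decides which dialog to open next. The following is a rough sketch of that last decision step in plain Scala; the Flags class and nextModal function are illustrative and not taken from the project.

object LegalFlowSketch {
  // mirror of LegalModal's six booleans; names are illustrative
  final case class Flags(legal: Boolean = false, privacy: Boolean = false, terms: Boolean = false,
                         eula: Boolean = false, trademarks: Boolean = false, copyright: Boolean = false)

  // the parent looks at the reported flags and decides which dialog to open next
  def nextModal(f: Flags): Option[String] =
    if (f.privacy) Some("PrivacyPolicy")
    else if (f.terms) Some("TermsOfServices")
    else if (f.eula) Some("EndUserAgreement")
    else if (f.trademarks) Some("TrademarksAndCredits")
    else if (f.copyright) Some("Copyright")
    else None

  def main(args: Array[String]): Unit =
    println(nextModal(Flags(privacy = true)))   // Some(PrivacyPolicy)
}
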
Example 96
Source File: AccountValidationSuccess.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.DashBoardCSS
import scala.language.reflectiveCalls
import scalacss.ScalaCssReact._
import org.querki.jquery._

object AccountValidationSuccess {
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback)
  case class State()

  class Backend(t: BackendScope[Props, State]) {
    def hide = Callback {
      $(t.getDOMNode()).modal("hide")
    }
    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }

    def render(s: State, p: Props) = {
      // log.debug(s"User is ${if (s.item.id == "") "adding" else "editing"} a todo")
      val headerText = "Account Validation Success"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          closed = () => formClosed(s, p)
        ),
        <.div(^.className := "row")(
          <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
            <.div(^.className := "row")(
              <.div(DashBoardCSS.Style.scltInputModalContainerMargin)(
                <.div(DashBoardCSS.Style.modalBodyText)(
                  "Account Validation Successful!",
                  <.div(  )(<.button(^.tpe := "button", ^.className := "btn", DashBoardCSS.Style.btnBackground, ^.onClick --> hide)("Login"))
                )
              )
            )
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("AccountValidationSuccessful")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build

  def apply(props: Props) = component(props)
} 
Example 97
Source File: RegistrationFailed.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.DashBoardCSS
import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls
import org.querki.jquery._

object RegistrationFailed {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: (Boolean) => Callback, errorMsg: String = "")
  case class State(registrationFailed: Boolean = false)

  class Backend(t: BackendScope[Props, State]) {

    def hide = Callback {
      $(t.getDOMNode()).modal("hide")
    }
    def login(): Callback = {
      t.modState(s => s.copy(registrationFailed = true))
    }

    def modalClosed(state: State, props: Props): Callback = {
      props.submitHandler(state.registrationFailed)
    }

    def render(s: State, p: Props) = {
      val headerText = "Error"

      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.div(DashBoardCSS.Style.modalHeaderText)(headerText)),

          closed = () => modalClosed(s, p)
        ),

        <.div(^.className := "row")(
          <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
            <.div(^.className := "row")(
              <.div(DashBoardCSS.Style.scltInputModalContainerMargin)(
                <.div(DashBoardCSS.Style.modalBodyText)(
                  p.errorMsg,
                  <.div(DashBoardCSS.Style.modalContentFont)(<.button(^.tpe := "button", ^.className := "btn", DashBoardCSS.Style.btnBackground, ^.onClick --> hide)("Try again"), <.button(^.tpe := "button", DashBoardCSS.Style.MarginLeftchkproduct,DashBoardCSS.Style.btnDefault, ^.className := "btn", ^.onClick --> login)("Login"))

                )
              )
            )
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("RegistrationFailed")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .componentDidUpdate(scope => Callback {
      if (scope.currentState.registrationFailed) {
        scope.$.backend.hide
      }
    })
    .build

  def apply(props: Props) = component(props)
} 
Example 98
Source File: LoginFailed.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.DashBoardCSS
import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls
import org.querki.jquery._

object LoginFailed {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback, loginErrorMessage: String = "")
  case class State()
  class Backend(t: BackendScope[Props, State]) {

    def hide = Callback {
      $(t.getDOMNode()).modal("hide")
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }

    def render(s: State, p: Props) = {
      // log.debug(s"User is ${if (s.item.id == "") "adding" else "editing"} a todo")
      val headerText = "Login Failed"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          closed = () => formClosed(s, p)
        ),

        <.div(^.className := "row")(
          <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
            <.div(^.className := "row")(
              <.div(DashBoardCSS.Style.scltInputModalContainerMargin)(
                <.div(DashBoardCSS.Style.modalBodyText)(
                   "The username and password combination that you are using is not correct. Please check and try again.",
                  <.div(DashBoardCSS.Style.modalContentFont)(<.button(^.tpe := "button", ^.className := "btn", DashBoardCSS.Style.btnBackground, ^.onClick --> hide)("Try again"))
                )
              )
            )
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("LoginFailed")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build

  def apply(props: Props) = component(props)
} 
Example 99
Source File: ServerErrorModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.DashBoardCSS
import client.services.LGCircuit

import scala.language.reflectiveCalls
import scalacss.ScalaCssReact._
import org.querki.jquery._

object ServerErrorModal {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: (Boolean) => Callback, msg: String)
  case class State(showLoginForm:Boolean=false)
  class Backend(t: BackendScope[Props, State]) {
    def closeForm() =  {
      $(t.getDOMNode()).modal("hide")
      t.modState(s => s.copy(showLoginForm = true))
    }

    def modalClosed(state: State, props: Props): Callback = {
      props.submitHandler(state.showLoginForm)
    }

    def addLoginDetails(): Callback = Callback{
      $(t.getDOMNode()).modal("hide")
      //t.modState(s => s.copy(showApiDetailsForm = false, showLoginForm = true))
    }

    def render(s: State, p: Props) = {
      val headerText = "Error"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.div(DashBoardCSS.Style.modalHeaderText)(headerText)),

          closed = () => modalClosed(s, p)
        ),

        <.div(^.className := "row")(
          <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
            <.div(^.className := "row")(
              <.div(DashBoardCSS.Style.scltInputModalContainerMargin)(
                <.div(DashBoardCSS.Style.modalBodyText)(
                  s"We are currently encountering problems in serving your request. ${p.msg}",
                  <.div(DashBoardCSS.Style.modalContentFont)(<.button(^.tpe := "button", ^.className := "btn",DashBoardCSS.Style.btnDefault,DashBoardCSS.Style.btnBackground, ^.onClick --> closeForm)("Close")
                   // ApiDetailsForm(ApiDetailsForm.Props(addLoginDetails))

                  )
//                    ApiDetailsForm(ApiDetailsForm.Props(addLoginDetails))
                )
              )
            )
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("ErrorModal")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build

  def apply(props: Props) = component(props)
} 
Example 100
Source File: CopyrightModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import client.components.Bootstrap.Modal
import client.components.GlobalStyles
import client.components.Icon
import shared.models.{ EmailValidationModel, UserModel }
import client.LGMain.{ Loc }
import scala.util.{ Failure, Success }
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.{ DashBoardCSS }
import scala.language.reflectiveCalls
import org.querki.jquery._

object CopyrightModal {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback)
  case class State()

  case class Backend(t: BackendScope[Props, State]) {
    def hide = Callback {
      // instruct Bootstrap to hide the modal
      $(t.getDOMNode()).modal("hide")
    }
    def mounted(props: Props): Callback = Callback {
    }

    def submitForm(e: ReactEventI) = {
      e.preventDefault()
    }

    def formClosed(state: State, props: Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }

    def render(s: State, p: Props) = {

      val headerText = "Copyright"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.button(^.tpe := "button", bss.close, ^.onClick --> hide, Icon.close), <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),
          // this is called after the modal has been hidden (animation is completed)
          closed = () => formClosed(s, p)
        ),
        <.div(^.className := "row", DashBoardCSS.Style.MarginLeftchkproduct)(
          <.div(DashBoardCSS.Style.marginTop10px)(),
          <.div()(
            "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum."
          )
        ),
        <.div()(
          <.div(DashBoardCSS.Style.modalHeaderPadding, ^.className := "text-right")(
            // <.button(^.tpe := "button", ^.className := "btn btn-default", DashBoardCSS.Style.marginLeftCloseBtn, ^.onClick --> hide, "Post"),
            // <.button(^.tpe := "button", ^.className := "btn btn-default", DashBoardCSS.Style.marginLeftCloseBtn, ^.onClick --> hide, "Cancel")
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop10px, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("Copyright")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build
  def apply(props: Props) = component(props)
} 
Example 101
Source File: AccountValidationFailed.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package client.modals

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import client.components.Bootstrap._
import client.components._
import client.css.DashBoardCSS
import org.querki.jquery._
import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls

object AccountValidationFailed {
  @inline private def bss = GlobalStyles.bootstrapStyles
  case class Props(submitHandler: () => Callback)
  case class State()

  class Backend(t: BackendScope[Props, State]) {
    def hide = Callback {
      $(t.getDOMNode()).modal("hide")
    }

    def modalClosed(state: State, props: Props): Callback = {
      props.submitHandler()
    }

    def render(s: State, p: Props) = {
      val headerText = "Error"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(  <.div(DashBoardCSS.Style.modalHeaderText)(headerText)),

          closed = () => modalClosed(s, p)
        ),

        <.div(^.className := "row")(
          <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
            <.div(^.className := "row")(
              <.div(DashBoardCSS.Style.scltInputModalContainerMargin)(
                <.div(DashBoardCSS.Style.modalBodyText)(
                  "Validation code you entered is incorrect, please check your email and enter valid code",
                  <.div(DashBoardCSS.Style.modalContentFont)(<.button(^.tpe := "button", ^.className := "btn", DashBoardCSS.Style.btnBackground, ^.onClick --> hide)("Try again"))
                )
              )
            )
          )
        ),
        <.div(bss.modal.footer, DashBoardCSS.Style.marginTop5p, DashBoardCSS.Style.marginLeftRight)()
      )
    }
  }
  private val component = ReactComponentB[Props]("AccountValidationFailed")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .build

  def apply(props: Props) = component(props)
} 
Example 102
Source File: LoginErrorModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.modalpopups

import diode.{ModelR, ModelRO}
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.GlobalStyles
import synereo.client.css.SynereoCommanStylesCSS

import scala.language.reflectiveCalls
import scalacss.ScalaCssReact._
import japgolly.scalajs.react._
import synereo.client.components._
import synereo.client.components.Bootstrap._
import synereo.client.services.{RootModel, SYNEREOCircuit}

import scala.scalajs.js


//scalastyle:off
object LoginErrorModal {
  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: (Boolean) => Callback, loginError: String = "")

  case class State(showLogin: Boolean = false,
                   lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value)


  class LoginErrorBackend(t: BackendScope[Props, State]) {
    def closeForm = {
      jQuery(t.getDOMNode()).modal("hide")
      t.modState(s => s.copy(showLogin = true))
    }

    def modalClosed(state: LoginErrorModal.State, props: LoginErrorModal.Props): Callback = {
      props.submitHandler(state.showLogin)
    }

    def mounted(): Callback = Callback {
      SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e))
    }

    def updateLang(reader: ModelRO[js.Dynamic]) = {
      t.modState(s => s.copy(lang = reader.value)).runNow()
    }

  }


  private val component = ReactComponentB[Props]("ErrorModal")
    .initialState_P(p => State())
    .backend(new LoginErrorBackend(_))
    .renderPS((t, props, state) => {
      val headerText = state.lang.selectDynamic("ERROR").toString
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.h4(headerText)),

          closed = () => t.backend.modalClosed(state, props)
        ),
        <.div(^.className := "container-fluid")(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div(^.className := "row")(
                <.div()(
                  <.h3(SynereoCommanStylesCSS.Style.loginErrorHeading)(props.loginError)
                ),
                <.div(bss.modal.footer, SynereoCommanStylesCSS.Style.errorModalFooter)(
                  <.div(^.className := "row")(
                    <.div(^.className := "col-md-12 text-center")(
                      <.div()(
                        <.h5(state.lang.selectDynamic("API_HOST_UNREACHABLE").toString),
                        <.button(^.tpe := "button", ^.className := "btn btn-default",
                          ^.onClick --> t.backend.closeForm)(state.lang.selectDynamic("CLOSE").toString)
                      )
                    )
                  )
                )
              )
            )
          )
        )
      )
    })
    .componentDidMount(scope => scope.backend.mounted())
    .build

  def apply(props: Props) = component(props)
} 
Example 103
Source File: AccountValidationSuccess.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.modalpopups

import diode.{ModelR, ModelRO}
import synereo.client.components.GlobalStyles
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.Bootstrap.Modal
import synereo.client.components._
import synereo.client.css.SignupCSS

import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls
import synereo.client.components.Bootstrap._
import synereo.client.services.{RootModel, SYNEREOCircuit}

import scala.scalajs.js




object AccountValidationSuccess {

  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: () => Callback)

  case class State(lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value)

  class AccountValidationSuccessBackend(t: BackendScope[Props, State]) {
    def hide = Callback {
      jQuery(t.getDOMNode()).modal("hide")
    }

    def updateLang(reader: ModelRO[js.Dynamic]) = {
      t.modState(s => s.copy(lang = reader.value)).runNow()
    }

    def mounted(props: Props) = Callback {
      SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e))

    }

    def formClosed(state: AccountValidationSuccess.State, props: AccountValidationSuccess.Props): Callback = {
      // call parent handler with the new item and whether form was OK or cancelled
      props.submitHandler()
    }
  }

  private val component = ReactComponentB[Props]("AccountValidationSuccess")
    .initialState_P(p => State())
    .backend(new AccountValidationSuccessBackend(_))
    .renderPS((t, props, state) => {
      // log.debug(s"User is ${if (s.item.id == "") "adding" else "editing"} a todo")
      val headerText = s"${state.lang.selectDynamic("ACCOUNT_VALIDATION_SUCCESS").toString}"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.div(SignupCSS.Style.accountValidationSuccessText)(headerText),
          closed = () => t.backend.formClosed(state, props)
        ),
        <.div(^.className := "container-fluid")(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div(^.className := "row")(
                <.div()(
                  <.div(^.className := "pull-right")(
                    <.button(^.tpe := "button", SignupCSS.Style.signUpBtn,
                      ^.className := "btn", ^.onClick --> t.backend.hide, state.lang.selectDynamic("LOGIN").toString)
                  )
                )
              )
            )
          ),
          <.div(bss.modal.footer)()
        )
      )
    })
    .componentDidMount(scope => scope.backend.mounted(scope.props))
    .build

  def apply(props: Props) = component(props)
} 
Example 104
Source File: RegistrationFailed.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.modalpopups

import diode.{ModelR, ModelRO}
import synereo.client.components.GlobalStyles
import synereo.client.css.{LoginCSS, SignupCSS, SynereoCommanStylesCSS}
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.Bootstrap.Modal

import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls
import japgolly.scalajs.react._
import synereo.client.components._
import synereo.client.components.Bootstrap._
import synereo.client.services.{RootModel, SYNEREOCircuit}

import scala.scalajs.js


//scalastyle:off
object RegistrationFailed {

  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: (Boolean) => Callback, registrationErrorMsg: String = "")

  case class State(registrationFailed: Boolean = false, lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value)

  class RegistrationFailedBackend(t: BackendScope[Props, State]) {

    def hide = Callback {
      jQuery(t.getDOMNode()).modal("hide")
    }

    def updateLang(reader: ModelRO[js.Dynamic]) = {
      t.modState(s => s.copy(lang = reader.value)).runNow()
    }

    def mounted(props: Props) = Callback {
      SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e))
    }

    def login(): Callback = {
      jQuery(t.getDOMNode()).modal("hide")
      t.modState(s => s.copy(registrationFailed = true))
    }

    def modalClosed(state: RegistrationFailed.State, props: RegistrationFailed.Props): Callback = {
      props.submitHandler(state.registrationFailed)
    }
  }

  private val component = ReactComponentB[Props]("RegistrationFailed")
    .initialState_P(p => State())
    .backend(new RegistrationFailedBackend(_))
    .renderPS((t, props, state) => {
      val headerText = state.lang.selectDynamic("REGISTRATION_FAILED").toString
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.h4(headerText),
          closed = () => t.backend.modalClosed(state, props), "static", true, addStyles = (Seq(SignupCSS.Style.signUpModalStyle))
        ),
        <.div(^.className := "container-fluid")(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div(^.className := "row")(
                <.div()(
                  <.div(LoginCSS.Style.message)(props.registrationErrorMsg),
                  <.div(^.className := "pull-right")(<.button(^.tpe := "button", ^.className := "btn", SignupCSS.Style.signUpBtn, ^.onClick --> t.backend.hide)("Try again")),
                  <.div(^.className := "pull-right", SynereoCommanStylesCSS.Style.marginRight15px,
                    <.button(^.tpe := "button", ^.className := "btn", SignupCSS.Style.signUpBtn, ^.onClick --> t.backend.login)("Login")
                  )
                )
              )
            )
          ),
          <.div(bss.modal.footer)()
        )
      )
    })
    .componentDidMount(scope => scope.backend.mounted(scope.props))
    .componentDidUpdate(scope => Callback {
      if (scope.currentState.registrationFailed) {
        scope.$.backend.hide
      }
    })
    .build

  def apply(props: Props) = component(props)

} 
Example 105
Source File: LoginFailed.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.modalpopups



import diode.{ModelR, ModelRO}
import synereo.client.components.GlobalStyles
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.Bootstrap.Modal
import synereo.client.css.LoginCSS
import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls
import japgolly.scalajs.react._
import synereo.client.components._
import synereo.client.components.Bootstrap._
import synereo.client.services.{RootModel, SYNEREOCircuit}

import scala.scalajs.js

//scalastyle:off
object LoginFailed {
  // shorthand for styles
  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: () => Callback, loginErrorMessage: String = "")

  case class State(lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value)

  class LoginFailedBackend(t: BackendScope[Props, State]) {

    def hide = Callback {
      jQuery(t.getDOMNode()).modal("hide")
    }
    def mounted(): Callback = Callback {
      SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e))
    }

    def updateLang(reader: ModelRO[js.Dynamic]) = {
      t.modState(s => s.copy(lang = reader.value)).runNow()
    }

    def formClosed(state: LoginFailed.State, props: LoginFailed.Props): Callback = {
      props.submitHandler()
    }

  }

  private val component = ReactComponentB[Props]("LoginFailed")
    .initialState_P(p => State())
    .backend(new LoginFailedBackend(_))
    .renderPS((t, props, state) => {
      val headerText = state.lang.selectDynamic("ERROR").toString
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.h4()(headerText),
          closed = () => t.backend.formClosed(state, props)
        ),
        <.div(^.className := "container-fluid")(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div(^.className := "row")(
                <.div()(
                  <.h3()(
                    props.loginErrorMessage,
                    <.div(<.button(^.tpe := "button", ^.className := "btn",
                      ^.onClick --> t.backend.hide, LoginCSS.Style.modalTryAgainBtn)(state.lang.selectDynamic("TRY_AGAIN").toString))
                  )
                )
              )
            )
          ),
          <.div(bss.modal.footer)()
        )
      )
    })
    .componentDidMount(scope => scope.backend.mounted())
    .build

  def apply(props: Props) = component(props)
} 
Example 106
Source File: ServerErrorModal.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.modalpopups

import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.Bootstrap._
import synereo.client.components._
import synereo.client.components.GlobalStyles
import synereo.client.css.{SynereoCommanStylesCSS}
import synereo.client.handlers.LogoutUser
import synereo.client.services.SYNEREOCircuit
import scala.language.reflectiveCalls
import scalacss.ScalaCssReact._
import diode.AnyAction._

//scalastyle:off
object ServerErrorModal {
  @inline private def bss = GlobalStyles.bootstrapStyles

  // Props reconstructed here; signature assumed from modalClosed below (submitHandler takes no arguments)
  case class Props(submitHandler: () => Callback)

  case class State()

  class Backend(t: BackendScope[Props, State]) {
    def closeForm = Callback {
      SYNEREOCircuit.dispatch(LogoutUser())
      jQuery(t.getDOMNode()).modal("hide")
    }

    def modalClosed(state: State, props: Props): Callback = {
      props.submitHandler()
    }
  }

  private val component = ReactComponentB[Props]("ErrorModal")
    .initialState_P(p => State())
    .backend(new Backend(_))
    .renderPS((t, P, S) => {
      val headerText = "Error"
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.div()(headerText)),

          closed = () => t.backend.modalClosed(S, P)
        ),
        <.div(^.className := "container-fluid")(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div(^.className := "row")(
                <.div()(
                  <.h3(SynereoCommanStylesCSS.Style.loginErrorHeading)(
                    "Encountering problems in serving request: ERR_CONNECTION_REFUSED. Check the server availability."
                  )
                ),
                <.div(bss.modal.footer, SynereoCommanStylesCSS.Style.errorModalFooter)(
                  <.div(^.className := "row")(
                    <.div(^.className := "col-md-12 text-center")(
                      <.div()(<.button(^.tpe := "button", ^.className := "btn btn-default", ^.onClick --> t.backend.closeForm)("Close"))
                    )
                  )
                )
              )
            )
          )
        )
      )
    })
    // .shouldComponentUpdate(scope => scope.currentProps.proxy().isServerError)
    .build

  def apply(props: Props) = component(props)
} 
Example 107
Source File: AccountValidationFailed.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.modalpopups

import diode.{ModelR, ModelRO}
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.components.Bootstrap.Modal
import synereo.client.css.SignupCSS

import scalacss.ScalaCssReact._
import scala.language.reflectiveCalls
import japgolly.scalajs.react._
import synereo.client.components._
import synereo.client.components.Bootstrap._
import synereo.client.services.{RootModel, SYNEREOCircuit}

import scala.scalajs.js

//scalastyle:off
object AccountValidationFailed {

  @inline private def bss = GlobalStyles.bootstrapStyles

  case class Props(submitHandler: () => Callback)

  case class State(lang: js.Dynamic = SYNEREOCircuit.zoom(_.i18n.language).value)

  // backend and render header reconstructed; assumed to follow the pattern of the other synereo modal popups
  class Backend(t: BackendScope[Props, State]) {
    def hide = Callback {
      jQuery(t.getDOMNode()).modal("hide")
    }

    def updateLang(reader: ModelRO[js.Dynamic]) = {
      t.modState(s => s.copy(lang = reader.value)).runNow()
    }

    def mounted(): Callback = Callback {
      SYNEREOCircuit.subscribe(SYNEREOCircuit.zoom(_.i18n.language))(e => updateLang(e))
    }

    def formClosed(state: State, props: Props): Callback = {
      props.submitHandler()
    }
  }

  private val component = ReactComponentB[Props]("AccountValidationFailed")
    .initialState_P(p => State())
    .backend(new Backend(_))
    .renderPS((t, props, state) => {
      val headerText = state.lang.selectDynamic("ERROR").toString // i18n key assumed
      Modal(
        Modal.Props(
          // header contains a cancel button (X)
          header = hide => <.span(<.div(SignupCSS.Style.signUpHeading)(headerText)),
          closed = () => t.backend.formClosed(state, props)
        ),
        <.div(^.className := "container-fluid")(
          <.div(^.className := "row")(
            <.div(^.className := "col-md-12 col-sm-12 col-xs-12")(
              <.div()(
                <.div()(^.fontSize := "18px", state.lang.selectDynamic("VALIDATION_CODE_IS_INCORRECT").toString),
                <.div(^.className := "pull-right")(
                  <.button(^.tpe := "button", SignupCSS.Style.signUpBtn, ^.className := "btn", ^.onClick --> t.backend.hide, state.lang.selectDynamic("TRY_AGAIN").toString)
                )
              ),
              <.div(bss.modal.footer)()
            )
          )
        )
      )
    })
    .componentDidMount(scope => scope.backend.mounted())
    .build

  def apply(props: Props) = component(props)
} 
Example 108
Source File: SearchComponent.scala    From ProductWebUI   with Apache License 2.0 5 votes vote down vote up
package synereo.client.components

import synereo.client.handlers._
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.prefix_<^._
import synereo.client.css.SynereoCommanStylesCSS
import synereo.client.services.SYNEREOCircuit
import diode.AnyAction._
import shared.dtos._
import shared.models.Label
import synereo.client.sessionitems.SessionItems
import org.scalajs.dom

import scalacss.ScalaCssReact._
import synereo.client.utils
import synereo.client.utils.{AppUtils, ConnectionsUtils, ContentUtils, LabelsUtils}

import scala.language.reflectiveCalls

//scalastyle:off
object SearchComponent {

  case class Props()

  case class State(connectionsSelectizeInputId: String = "SearchComponentCnxnSltz" )

  val searchesProxy = SYNEREOCircuit.connect(_.searches)

  class Backend(t: BackendScope[Props, State]) {
    def mounted(props: Props) = Callback {
    }

    def fromSelecize(): Callback = Callback {}

    def searchWithLblAndCnxn(e: ReactEventI) = Callback {
      val (cnxns, labels) = ConnectionsLabelsSelectize
        .getCnxnsAndLabelsFromSelectize(t.state.runNow().connectionsSelectizeInputId)
      val cnxnToPost = ConnectionsUtils.getCnxnForReq(cnxns)
      val searchLabels = LabelsUtils.buildProlog(
        Seq(Label(text = AppUtils.MESSAGE_POST_LABEL)) ++ labels.map(currentLabel => Label(text = currentLabel)
        ), LabelsUtils.PrologTypes.Each)
      val expr = Expression("feedExpr", ExpressionContent(cnxnToPost, searchLabels))
      //      SYNEREOCircuit.dispatch(CancelPreviousAndSubscribeNew(SubscribeRequest(SYNEREOCircuit.zoom(_.sessionRootModel.sessionUri).value, expr)))
      ContentUtils.cancelPreviousAndSubscribeNew(SubscribeRequest(SYNEREOCircuit.zoom(_.sessionRootModel.sessionUri).value, expr))
    }

    def render(s: State, p: Props) = {
      <.div(
        <.div(^.id := s.connectionsSelectizeInputId, SynereoCommanStylesCSS.Style.searchBoxContainer)(
          ConnectionsLabelsSelectize(ConnectionsLabelsSelectize.Props(s.connectionsSelectizeInputId))
        ),
        <.div(SynereoCommanStylesCSS.Style.displayInline)(
          <.button(^.className := "btn btn-primary", SynereoCommanStylesCSS.Style.searchBtn, ^.onClick ==> searchWithLblAndCnxn)(
            MIcon.apply("search", "24")
          )
        )
      )
    }
  }

  val component = ReactComponentB[Props]("SearchComponent")
    .initialState_P(p => State())
    .renderBackend[Backend]
    .componentDidMount(scope => scope.backend.mounted(scope.props))
    .build

  def apply(props: Props) = component(props)
} 
Example 109
Source File: DocumentAssemblerTestSpec.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp

import org.scalatest._
import org.apache.spark.sql.Row
import scala.language.reflectiveCalls
import Matchers._

class DocumentAssemblerTestSpec extends FlatSpec {
  def fixture = new {
    val text = ContentProvider.englishPhrase
    val df = AnnotatorBuilder.withDocumentAssembler(DataBuilder.basicDataBuild(text))
    val assembledDoc = df
      .select("document")
      .collect
      .flatMap { _.getSeq[Row](0) }
      .map { Annotation(_) }
  }

  "A DocumentAssembler" should "annotate with the correct indexes" in {
    val f = fixture
    f.text.head should equal (f.text(f.assembledDoc.head.begin))
    f.text.last should equal (f.text(f.assembledDoc.head.end))
  }
} 
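
This is also where the scala.language.reflectiveCalls import earns its keep in the spark-nlp test specs: `def fixture = new { ... }` produces a structural (refinement) type, and every access such as f.text or f.assembledDoc is compiled to a reflective call, which Scala flags unless the feature is enabled. A self-contained sketch, using illustrative names rather than spark-nlp classes:

import scala.language.reflectiveCalls

object FixtureSketch {
  // `new { ... }` has type AnyRef { val text: String; val tokens: Seq[String] }
  def fixture = new {
    val text = "hello world"
    val tokens = text.split(" ").toSeq
  }

  def main(args: Array[String]): Unit = {
    val f = fixture
    println(f.text)            // reflective call; warns without the language import
    println(f.tokens.length)   // reflective call as well
  }
}
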
Example 110
Source File: TokenizerBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators

import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder, AnnotatorType}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest._

import scala.language.reflectiveCalls

trait TokenizerBehaviors { this: FlatSpec =>

  def fixture(dataset: => Dataset[Row]) = new {
    val df = AnnotatorBuilder.withTokenizer(AnnotatorBuilder.withTokenizer(dataset))
    val documents = df.select("document")
    val sentences = df.select("sentence")
    val tokens = df.select("token")
    val sentencesAnnotations = sentences
      .collect
      .flatMap { r => r.getSeq[Row](0) }
      .map { a => Annotation(a.getString(0), a.getInt(1), a.getInt(2), a.getString(3), a.getMap[String, String](4)) }
    val tokensAnnotations = tokens
      .collect
      .flatMap { r => r.getSeq[Row](0)}
      .map { a => Annotation(a.getString(0), a.getInt(1), a.getInt(2), a.getString(3), a.getMap[String, String](4)) }

    val docAnnotations = documents
      .collect
      .flatMap { r => r.getSeq[Row](0)}
      .map { a => Annotation(a.getString(0), a.getInt(1), a.getInt(2), a.getString(3), a.getMap[String, String](4)) }

    val corpus = docAnnotations
      .map(d => d.result)
      .mkString("")
  }

  def fullTokenizerPipeline(dataset: => Dataset[Row]) {
    "A Tokenizer Annotator" should "successfully transform data" in {
      val f = fixture(dataset)
      assert(f.tokensAnnotations.nonEmpty, "Tokenizer should add annotators")
    }

    it should "annotate using the annotatorType of token" in {
      val f = fixture(dataset)
      assert(f.tokensAnnotations.nonEmpty, "Tokenizer should add annotators")
      f.tokensAnnotations.foreach { a =>
        assert(a.annotatorType == AnnotatorType.TOKEN, "Tokenizer annotations type should be equal to 'token'")
      }
    }

    it should "annotate with the correct word indexes" in {
      val f = fixture(dataset)
      f.tokensAnnotations.foreach { a =>
        val token = a.result
        val sentenceToken = f.corpus.slice(a.begin, a.end + 1)
        assert(sentenceToken == token, s"Word ($sentenceToken) from sentence at (${a.begin},${a.end}) should be equal to token ($token) inside the corpus ${f.corpus}")
      }
    }
  }
} 
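
The word-index check above depends on begin and end being inclusive character offsets into the concatenated document text, which is why the comparison slices with end + 1 (String.slice treats its upper bound as exclusive). A tiny illustration with made-up offsets, not real annotator output:

object OffsetSketch {
  def main(args: Array[String]): Unit = {
    val corpus = "Spark NLP tokenizes text"
    val (begin, end) = (6, 8)                  // inclusive offsets of "NLP" in the corpus
    val token = corpus.slice(begin, end + 1)   // slice's upper bound is exclusive, hence + 1
    assert(token == "NLP")
    println(s"($begin,$end) -> $token")
  }
}
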
Example 111
Source File: PragmaticSentimentBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators.sda.pragmatic

import com.johnsnowlabs.nlp.annotators.common.TokenizedSentence
import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest._
import com.johnsnowlabs.nlp.AnnotatorType.SENTIMENT
import com.johnsnowlabs.nlp.util.io.{ExternalResource, ReadAs, ResourceHelper}

import scala.language.reflectiveCalls

trait PragmaticSentimentBehaviors { this: FlatSpec =>

  def fixture(dataset: Dataset[Row]) = new {
    val df = AnnotatorBuilder.withPragmaticSentimentDetector(dataset)
    val sdAnnotations = Annotation.collect(df, "sentiment").flatten
  }

  def isolatedSentimentDetector(tokenizedSentences: Array[TokenizedSentence], expectedScore: Double): Unit = {
    s"tagged sentences" should s"have an expected score of $expectedScore" in {
      val pragmaticScorer = new PragmaticScorer(ResourceHelper.parseKeyValueText(ExternalResource("src/test/resources/sentiment-corpus/default-sentiment-dict.txt", ReadAs.TEXT, Map("delimiter" -> ","))))
      val result = pragmaticScorer.score(tokenizedSentences)
      assert(result == expectedScore, s"because result: $result did not match expected: $expectedScore")
    }
  }

  def sparkBasedSentimentDetector(dataset: => Dataset[Row]): Unit = {

    "A Pragmatic Sentiment Analysis Annotator" should s"create annotations" in {
      val f = fixture(dataset)
      assert(f.sdAnnotations.size > 0)
    }

    it should "create annotations with the correct type" in {
      val f = fixture(dataset)
      f.sdAnnotations.foreach { a =>
        assert(a.annotatorType == SENTIMENT)
      }
    }

    it should "successfully score sentences" in {
      val f = fixture(dataset)
      f.sdAnnotations.foreach { a =>
        assert(List("positive", "negative").contains(a.result))
      }
    }
  }
} 
Example 112
Source File: MultiDateMatcherBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators

import com.johnsnowlabs.nlp.AnnotatorType.DATE
import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest.Matchers._
import org.scalatest._

import scala.language.reflectiveCalls

trait MultiDateMatcherBehaviors extends FlatSpec {
  def fixture(dataset: Dataset[Row]) = new {
    val df = AnnotatorBuilder.withMultiDateMatcher(dataset)
    val dateAnnotations = df.select("date")
      .collect
      .flatMap { _.getSeq[Row](0) }
      .map { Annotation(_) }
  }

  def sparkBasedDateMatcher(dataset: => Dataset[Row]): Unit = {
    "A MultiDateMatcher Annotator" should s"successfuly parse dates" in {
      val f = fixture(dataset)
      f.dateAnnotations.foreach { a =>
        val d: String = a.result
        d should fullyMatch regex """\d+/\d+/\d+"""
      }
    }

    it should "create annotations" in {
      val f = fixture(dataset)
      assert(f.dateAnnotations.size > 0)
    }

    it should "create annotations with the correct type" in {
      val f = fixture(dataset)
      f.dateAnnotations.foreach { a =>
        assert(a.annotatorType == DATE)
      }
    }
  }
} 
Example 113
Source File: PragmaticDetectionBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators.sbd.pragmatic

import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder, AnnotatorType}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest._

import scala.language.reflectiveCalls

trait PragmaticDetectionBehaviors { this: FlatSpec =>

  def fixture(dataset: => Dataset[Row]) = new {
    val df = AnnotatorBuilder.withFullPragmaticSentenceDetector(dataset)
    val documents = df.select("document")
    val sentences = df.select("sentence")
    val sentencesAnnotations = sentences
      .collect
      .flatMap { r => r.getSeq[Row](0) }
      .map { a => Annotation(a.getString(0), a.getInt(1), a.getInt(2), a.getString(3), a.getMap[String, String](4)) }
    val corpus = sentencesAnnotations
      .flatMap { a => a.result }
      .mkString("")
  }

  private def f1Score(result: Array[String], expected: Array[String]): Double = {
    val nMatches = result.count(expected.contains(_))
    val nOutput = result.length
    val nExpected = expected.length
    val precision = nMatches / nOutput.toDouble
    val recall = nMatches / nExpected.toDouble
    (2 * precision * recall) / (precision + recall)
  }

  def isolatedPDReadAndMatchResult(input: String, correctAnswer: Array[String], customBounds: Array[String] = Array.empty[String]): Unit = {
    s"pragmatic boundaries detector with ${input.take(10)}...:" should
      s"successfully identify sentences as ${correctAnswer.take(1).take(10).mkString}..." in {
      val pragmaticApproach = new MixedPragmaticMethod(true, customBounds)
      val result = pragmaticApproach.extractBounds(input)
      val diffInResult = result.map(_.content).diff(correctAnswer)
      val diffInCorrect = correctAnswer.diff(result.map(_.content))
      assert(
        result.map(_.content).sameElements(correctAnswer),
        s"\n--------------\nSENTENCE IS WRONG:\n--------------\n$input" +
        s"\n--------------\nBECAUSE RESULT:\n--------------\n@@${diffInResult.mkString("\n@@")}" +
          s"\n--------------\nIS NOT EXPECTED:\n--------------\n@@${diffInCorrect.mkString("\n@@")}")
      assert(result.forall(sentence => {
        sentence.end == sentence.start + sentence.content.length - 1
      }), "because length mismatch")
    }
  }

  def isolatedPDReadAndMatchResultTag(input: String, correctAnswer: Array[String], customBounds: Array[String] = Array.empty[String], splitLength: Option[Int] = None): Unit = {
    s"pragmatic boundaries detector with ${input.take(10)}...:" should
      s"successfully identify sentences as ${correctAnswer.take(1).take(10).mkString}..." in {
      val sentenceDetector = new SentenceDetector()
      if (splitLength.isDefined)
        sentenceDetector.setSplitLength(splitLength.get)
      val result = sentenceDetector.tag(input).map(_.content)
      val diffInResult = result.diff(correctAnswer)
      val diffInCorrect = correctAnswer.diff(result)
      assert(
        result.sameElements(correctAnswer),
        s"\n--------------\nSENTENCE IS WRONG:\n--------------\n$input" +
          s"\n--------------\nBECAUSE RESULT:\n--------------\n@@${diffInResult.mkString("\n@@")}" +
          s"\n--------------\nIS NOT EXPECTED:\n--------------\n@@${diffInCorrect.mkString("\n@@")}")
    }
  }

  def isolatedPDReadScore(input: String, correctAnswer: Array[String], customBounds: Array[String] = Array.empty[String]): Unit = {
    s"boundaries prediction" should s"have an F1 score higher than 95%" in {
      val pragmaticApproach = new MixedPragmaticMethod(true, customBounds)
      val result = pragmaticApproach.extractBounds(input).map(_.content)
      val f1 = f1Score(result, correctAnswer)
      val unmatched = result.zip(correctAnswer).toMap.mapValues("\n"+_)
      info(s"F1 Score is: $f1")
      assert(f1 > 0.95, s"F1 Score is below 95%.\nMatch sentences:\n${unmatched.mkString("\n")}")
    }
  }

  def sparkBasedSentenceDetector(dataset: => Dataset[Row]): Unit = {
    "a Pragmatic Sentence Detection Annotator" should s"successfully annotate documents" in {
      val f = fixture(dataset)
      assert(f.sentencesAnnotations.nonEmpty, "Annotations should exists")
    }

    it should "add annotators of type sbd" in {
      val f = fixture(dataset)
      f.sentencesAnnotations.foreach { a =>
        assert(a.annotatorType == AnnotatorType.DOCUMENT, "annotatorType should be sbd")
      }
    }
  }
} 
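
For reference, here is the F1 calculation from the f1Score helper above worked through on a toy case: if 3 of 4 detected sentences appear among 5 expected ones, precision is 0.75, recall is 0.6, and F1 is 2 * 0.75 * 0.6 / (0.75 + 0.6), roughly 0.667. A standalone sketch with made-up sentence strings:

object F1Sketch {
  // same formula as the private f1Score helper above
  def f1Score(result: Array[String], expected: Array[String]): Double = {
    val nMatches = result.count(expected.contains(_))
    val precision = nMatches / result.length.toDouble
    val recall = nMatches / expected.length.toDouble
    (2 * precision * recall) / (precision + recall)
  }

  def main(args: Array[String]): Unit = {
    val expected = Array("First sentence.", "Second sentence.", "Third one.", "Fourth one.", "Fifth one.")
    val result = Array("First sentence.", "Second sentence.", "Third one.", "Not a real boundary")
    println(f1Score(result, expected))   // prints 0.666..., from precision 0.75 and recall 0.6
  }
}
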
Example 114
Source File: RegexMatcherBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators

import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest._
import com.johnsnowlabs.nlp.AnnotatorType.CHUNK
import scala.language.reflectiveCalls

trait RegexMatcherBehaviors { this: FlatSpec =>
  def fixture(dataset: Dataset[Row], rules: Array[(String, String)], strategy: String) = new {
    val annotationDataset: Dataset[_] = AnnotatorBuilder.withRegexMatcher(dataset, strategy)
    val regexAnnotations: Array[Annotation] = annotationDataset.select("regex")
      .collect
      .flatMap { _.getSeq[Row](0) }
      .map { Annotation(_) }

    annotationDataset.show()
  }

  def customizedRulesRegexMatcher(dataset: => Dataset[Row], rules: Array[(String, String)], strategy: String): Unit = {
    "A RegexMatcher Annotator with custom rules" should s"successfuly match ${rules.map(_._1).mkString(",")}" in {
      val f = fixture(dataset, rules, strategy)
      f.regexAnnotations.foreach { a =>
        assert(Seq("followed by 'the'", "ceremony").contains(a.metadata("identifier")))
      }
    }

    it should "create annotations" in {
      val f = fixture(dataset, rules, strategy)
      assert(f.regexAnnotations.nonEmpty)
    }

    it should "create annotations with the correct tag" in {
      val f = fixture(dataset, rules, strategy)
      f.regexAnnotations.foreach { a =>
        assert(a.annotatorType == CHUNK)
      }
    }
  }
} 
Example 115
Source File: DateMatcherBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators

import java.util.Date

import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest._
import org.scalatest.Matchers._
import com.johnsnowlabs.nlp.AnnotatorType.DATE
import scala.language.reflectiveCalls

trait DateMatcherBehaviors extends FlatSpec {
  def fixture(dataset: Dataset[Row]) = new {
    val df = AnnotatorBuilder.withDateMatcher(dataset)
    val dateAnnotations = df.select("date")
      .collect
      .flatMap { _.getSeq[Row](0) }
      .map { Annotation(_) }
  }

  def sparkBasedDateMatcher(dataset: => Dataset[Row]): Unit = {
    "A DateMatcher Annotator" should s"successfuly parse dates" in {
      val f = fixture(dataset)
      f.dateAnnotations.foreach { a =>
        val d: String = a.result
        d should fullyMatch regex """\d+/\d+/\d+"""
      }
    }

    it should "create annotations" in {
      val f = fixture(dataset)
      assert(f.dateAnnotations.size > 0)
    }

    it should "create annotations with the correct type" in {
      val f = fixture(dataset)
      f.dateAnnotations.foreach { a =>
        assert(a.annotatorType == DATE)
      }
    }
  }
} 
Example 116
Source File: TemplateBaseSpec.scala    From cloudformation-template-generator   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package com.monsanto.arch.cloudformation.model
import scala.language.reflectiveCalls
import com.monsanto.arch.cloudformation.model.resource.`AWS::SQS::Queue`
import org.scalatest.{FunSpec, Matchers}

class TemplateBaseSpec extends FunSpec with Matchers {

  it("should find components of templates") {
    object MyTemplate extends TemplateBase {
      val param1 = StringParameter("test1", "desc1")
      def resource1 = `AWS::SQS::Queue`(
        name = "resource1",
        QueueName = "test1",
        DelaySeconds = 5,
        MessageRetentionPeriod = 2,
        ReceiveMessageWaitTimeSeconds = 9,
        VisibilityTimeout = 4
      )
      lazy val out1 = Output(name = "out1", Description = "desc", Value = `AWS::AccountId`)
    }

    MyTemplate.template.Outputs.toSeq.flatten should contain(MyTemplate.out1)
    MyTemplate.template.Parameters.toSeq.flatten should contain(MyTemplate.param1)
    MyTemplate.template.Resources should contain(MyTemplate.resource1)
  }

  it("should find instances of HasTemplate") {
    object MyTemplate extends TemplateBase {

      lazy val anotherTemplate = new TemplateBase {
        def resource1 = `AWS::SQS::Queue`(
          name = "resource1",
          QueueName = "test1",
          DelaySeconds = 5,
          MessageRetentionPeriod = 2,
          ReceiveMessageWaitTimeSeconds = 9,
          VisibilityTimeout = 4
        )
      }

      lazy val anotherTemplate2 = new TemplateBase {
        def resource = `AWS::SQS::Queue`(
          name = "resource2",
          QueueName = "test2",
          DelaySeconds = 5,
          MessageRetentionPeriod = 2,
          ReceiveMessageWaitTimeSeconds = 9,
          VisibilityTimeout = 4
        )
      }
    }

    MyTemplate.template.Resources should contain(MyTemplate.anotherTemplate.resource1)
    MyTemplate.template.Resources should contain(MyTemplate.anotherTemplate2.resource)
  }

  it("should find instances of Template") {
    val queue = `AWS::SQS::Queue`(
      name = "resource1",
      QueueName = "test1",
      DelaySeconds = 5,
      MessageRetentionPeriod = 2,
      ReceiveMessageWaitTimeSeconds = 9,
      VisibilityTimeout = 4
    )
    object MyTemplate extends TemplateBase {

      lazy val anotherTemplate = Template.EMPTY ++ queue

    }

    MyTemplate.template.Resources should contain(queue)
  }

} 
Example 117
Source File: MutationTypes.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.shared.mutactions

import cool.graph.Types.Id
import cool.graph.shared.errors.UserAPIErrors
import cool.graph.shared.models.Field

import scala.language.reflectiveCalls

object MutationTypes {
  case class ArgumentValue(name: String, value: Any, field: Option[Field] = None) {
    def unwrappedValue: Any = {
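      // strips a single Option wrapper: Some(v) becomes v; any other value is returned unchanged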
      def unwrapSome(x: Any): Any = {
        x match {
          case Some(x) => x
          case x       => x
        }
      }
      unwrapSome(value)
    }
  }
  object ArgumentValue {
    def apply(name: String, value: Any, field: Field): ArgumentValue = ArgumentValue(name, value, Some(field))
  }

  object ArgumentValueList {
    def getId(args: List[ArgumentValue]): Option[Id] = args.find(_.name == "id").map(_.value.toString)
    def getId_!(args: List[ArgumentValue]): Id       = getId(args).getOrElse(throw UserAPIErrors.IdIsMissing())

  }
} 
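For reference, a minimal hypothetical usage sketch of the helpers above; the argument names and values ("user-1", "Ada") are made up for illustration:

import cool.graph.shared.mutactions.MutationTypes.{ArgumentValue, ArgumentValueList}

val args = List(ArgumentValue("id", "user-1"), ArgumentValue("name", Some("Ada")))
ArgumentValueList.getId(args)                      // Some("user-1")
args.find(_.name == "name").map(_.unwrappedValue)  // Some("Ada"): unwrappedValue removed the inner Some wrapper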
Example 118
Source File: TransactionSpec.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph

import cool.graph.client.database.DataResolver
import cool.graph.shared.database.Databases
import org.scalatest.{FlatSpec, Matchers}

import scala.concurrent.Future
import scala.util.{Failure, Random, Success, Try}

class TransactionSpec extends FlatSpec with Matchers {
  import cool.graph.util.AwaitUtils._

  import scala.language.reflectiveCalls

  val dataResolver: DataResolver = null // we don't need it for those tests

  "Transaction.verify" should "return a success if it contains no Mutactions at all" in {
    val transaction = Transaction(List.empty, dataResolver)
    val result      = await(transaction.verify())
    result should be(Success(MutactionVerificationSuccess()))
  }

  "Transaction.verify" should "return a success if all Mutactions succeed" in {
    val mutactions  = List(successfulMutaction, successfulMutaction, successfulMutaction)
    val transaction = Transaction(mutactions, dataResolver)
    val result      = await(transaction.verify())
    result should be(Success(MutactionVerificationSuccess()))
  }

  "Transaction.verify" should "return the failure of the first failed Mutaction" in {
    for (i <- 1 to 10) {
      val failedMutactions =
        Random.shuffle(List(failedMutaction("error 1"), failedMutaction("error 2"), failedMutaction("error 3")))
      val mutactions  = List(successfulMutaction) ++ failedMutactions
      val transaction = Transaction(mutactions, dataResolver)
      val result      = await(transaction.verify())
      result.isFailure should be(true)
      result.failed.get.getMessage should be(failedMutactions.head.errorMessage)
    }
  }

  def failedMutaction(errorMsg: String) = {
    new ClientSqlMutaction {
      val errorMessage = errorMsg

      override def execute = ???

      override def verify(): Future[Try[MutactionVerificationSuccess]] = {
        Future.successful(Failure(new Exception(errorMessage)))
      }
    }
  }

  def successfulMutaction = {
    new ClientSqlMutaction {
      override def execute = ???

      override def verify(): Future[Try[MutactionVerificationSuccess]] = {
        Future.successful(Success(MutactionVerificationSuccess()))
      }
    }
  }
} 
Example 119
Source File: XORShiftRandomSuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util.random

import scala.language.reflectiveCalls

import org.apache.commons.math3.stat.inference.ChiSquareTest
import org.scalatest.Matchers

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils.times

class XORShiftRandomSuite extends SparkFunSuite with Matchers {

  private def fixture = new {
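    // the members of this anonymous subclass form a structural type; accessing them (e.g. f.hundMil)
    // is a reflective call, which is why scala.language.reflectiveCalls is imported above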
    val seed = 1L
    val xorRand = new XORShiftRandom(seed)
    val hundMil = 1e8.toInt
  }

  
  test ("XORShift generates valid random numbers") {
    val f = fixture
    val numBins = 10
    val numRows = 5
    // bucket the generated values by their modulus into numRows x numBins counts
    val bins = Array.ofDim[Long](numRows, numBins)
    for (r <- 0 until numRows) {
      times(f.hundMil) { bins(r)(math.abs(f.xorRand.nextInt) % numBins) += 1 }
    }
    // a chi-square test at the 0.05 level should not reject the hypothesis
    // that the generated values are spread uniformly across the bins
    val chiTest = new ChiSquareTest
    assert(chiTest.chiSquareTest(bins, 0.05) === false)
  }

  test ("XORShift with zero seed") {
    val random = new XORShiftRandom(0L)
    assert(random.nextInt() != 0)
  }

  test ("hashSeed has random bits throughout") {
    val totalBitCount = (0 until 10).map { seed =>
      val hashed = XORShiftRandom.hashSeed(seed)
      val bitCount = java.lang.Long.bitCount(hashed)
      // make sure we have roughly equal numbers of 0s and 1s.  Mostly just check that we
      // don't have all 0s or 1s in the high bits
      bitCount should be > 20
      bitCount should be < 44
      bitCount
    }.sum
    // and over all the seeds, very close to equal numbers of 0s & 1s
    totalBitCount should be > (32 * 10 - 30)
    totalBitCount should be < (32 * 10 + 30)
  }
} 
Example 120
Source File: DiskBlockManagerSuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    try {
      Utils.deleteRecursively(rootDir0)
      Utils.deleteRecursively(rootDir1)
    } finally {
      super.afterAll()
    }
  }

  override def beforeEach() {
    super.beforeEach()
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(conf, deleteFilesOnStop = true)
  }

  override def afterEach() {
    try {
      diskBlockManager.stop()
    } finally {
      super.afterEach()
    }
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 121
Source File: HashShuffleManagerSuite.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.shuffle.hash

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkEnv, SparkFunSuite}
import org.apache.spark.executor.ShuffleWriteMetrics
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.shuffle.FileShuffleBlockResolver
import org.apache.spark.storage.{ShuffleBlockId, FileSegment}

class HashShuffleManagerSuite extends SparkFunSuite with LocalSparkContext {
  private val testConf = new SparkConf(false)

  private def checkSegments(expected: FileSegment, buffer: ManagedBuffer) {
    assert(buffer.isInstanceOf[FileSegmentManagedBuffer])
    val segment = buffer.asInstanceOf[FileSegmentManagedBuffer]
    assert(expected.file.getCanonicalPath === segment.getFile.getCanonicalPath)
    assert(expected.offset === segment.getOffset)
    assert(expected.length === segment.getLength)
  }

  test("consolidated shuffle can write to shuffle group without messing existing offsets/lengths") {

    val conf = new SparkConf(false)
    // reset after EACH object write. This is to ensure that there are bytes appended after
    // an object is written. So if the codepaths assume writeObject is end of data, this should
    // flush those bugs out. This was common bug in ExternalAppendOnlyMap, etc.
    conf.set("spark.serializer.objectStreamReset", "1")
    conf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
    conf.set("spark.shuffle.manager", "org.apache.spark.shuffle.hash.HashShuffleManager")

    sc = new SparkContext("local", "test", conf)

    val shuffleBlockResolver =
      SparkEnv.get.shuffleManager.shuffleBlockResolver.asInstanceOf[FileShuffleBlockResolver]

    val shuffle1 = shuffleBlockResolver.forMapTask(1, 1, 1, new JavaSerializer(conf),
      new ShuffleWriteMetrics)
    for (writer <- shuffle1.writers) {
      writer.write("test1", "value")
      writer.write("test2", "value")
    }
    for (writer <- shuffle1.writers) {
      writer.commitAndClose()
    }

    val shuffle1Segment = shuffle1.writers(0).fileSegment()
    shuffle1.releaseWriters(success = true)

    val shuffle2 = shuffleBlockResolver.forMapTask(1, 2, 1, new JavaSerializer(conf),
      new ShuffleWriteMetrics)

    for (writer <- shuffle2.writers) {
      writer.write("test3", "value")
      writer.write("test4", "vlue")
    }
    for (writer <- shuffle2.writers) {
      writer.commitAndClose()
    }
    val shuffle2Segment = shuffle2.writers(0).fileSegment()
    shuffle2.releaseWriters(success = true)

    // Now comes the test :
    // Write to shuffle 3; and close it, but before registering it, check if the file lengths for
    // previous task (for shuffle1) are the same as 'segments'. Earlier, we were inferring length
    // of block based on remaining data in file : which could mess things up when there is
    // concurrent read and writes happening to the same shuffle group.

    val shuffle3 = shuffleBlockResolver.forMapTask(1, 3, 1, new JavaSerializer(testConf),
      new ShuffleWriteMetrics)
    for (writer <- shuffle3.writers) {
      writer.write("test3", "value")
      writer.write("test4", "value")
    }
    for (writer <- shuffle3.writers) {
      writer.commitAndClose()
    }
    // check before we register.
    checkSegments(shuffle2Segment, shuffleBlockResolver.getBlockData(ShuffleBlockId(1, 2, 0)))
    shuffle3.releaseWriters(success = true)
    checkSegments(shuffle2Segment, shuffleBlockResolver.getBlockData(ShuffleBlockId(1, 2, 0)))
    shuffleBlockResolver.removeShuffle(1)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 122
Source File: DiskBlockManagerSuite.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 123
Source File: AvoidAliasConflictComplexSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.norm.capture

import io.getquill.Spec
import io.getquill.testContext._
import io.getquill.Query

class AvoidAliasConflictComplexSpec extends Spec {

  "properly aliases explicit join sets" - {
    import io.getquill.norm.Normalize
    import scala.language.reflectiveCalls
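    // the structural type bound T <: { def id: Int } used below makes t.id a reflective call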

    case class Person(id: Int, name: String)
    case class Address(id: Int, ownerFk: Int, street: String)
    case class Room(addressId: Int, stuff: String)

    "in tail clause" in {
      def fun[T <: { def id: Int }] = quote {
        (tbl: Query[T]) =>
          for {
            t <- tbl
            a <- query[Address].join(a => a.ownerFk == t.id)
          } yield (t, a)
      }

      def funExpect[T <: { def id: Int }] = quote {
        (tbl: Query[T]) =>
          for {
            t <- tbl
            a <- query[Address].join(a1 => a1.ownerFk == t.id)
          } yield (t, a)
      }

      val q = quote {
        fun[Person](query[Person].filter(a => a.name == "Joe"))
      }
      val expect = quote {
        funExpect[Person](query[Person].filter(a => a.name == "Joe"))
      }
      Normalize(q.ast) mustEqual Normalize(expect.ast)
    }

    "in middle clause" in {
      def fun[T <: { def id: Int }] = quote {
        (tbl: Query[T]) =>
          for {
            t <- tbl
            a <- query[Address].join(a => a.ownerFk == t.id)
            r <- query[Room].join(r => r.addressId == a.id)
          } yield (t, a, r)
      }

      def funExpect[T <: { def id: Int }] = quote {
        (tbl: Query[T]) =>
          for {
            t <- tbl
            a <- query[Address].join(a1 => a1.ownerFk == t.id)
            r <- query[Room].join(r => r.addressId == a.id)
          } yield (t, a, r)
      }

      val q = quote {
        fun[Person](query[Person].filter(a => a.name == "Joe"))
      }
      val expect = quote {
        funExpect[Person](query[Person].filter(a => a.name == "Joe"))
      }
      Normalize(q.ast) mustEqual Normalize(expect.ast)
    }

    "in middle and end clause" in {
      def fun[T <: { def id: Int }] = quote {
        (tbl: Query[T]) =>
          for {
            t <- tbl
            a <- query[Address].join(a => a.ownerFk == t.id)
            r <- query[Room].join(a => a.addressId == 1)
          } yield (t, a, r)
      }

      def funExpect[T <: { def id: Int }] = quote {
        (tbl: Query[T]) =>
          for {
            t <- tbl
            a <- query[Address].join(a1 => a1.ownerFk == t.id)
            r <- query[Room].join(a2 => a2.addressId == 1)
          } yield (t, a, r)
      }

      val q = quote {
        fun[Person](query[Person].filter(a => a.name == "Joe"))
      }
      val expect = quote {
        funExpect[Person](query[Person].filter(a => a.name == "Joe"))
      }
      Normalize(q.ast) mustEqual Normalize(expect.ast)
    }
  }
} 
Example 124
Source File: LagomServiceLocator.scala    From reactive-lib   with Apache License 2.0 5 votes vote down vote up
package com.lightbend.rp.servicediscovery.lagom.scaladsl

import akka.actor.ActorSystem
import com.lightbend.lagom.scaladsl.api.Descriptor
import com.lightbend.lagom.scaladsl.client.{ CircuitBreakersPanel, CircuitBreakingServiceLocator }
import java.net.{ URI => JavaURI }
import com.lightbend.rp.servicediscovery.scaladsl.ServiceLocator
import scala.concurrent.{ ExecutionContext, Future }
import scala.language.reflectiveCalls

class LagomServiceLocator(circuitBreakersPanel: CircuitBreakersPanel)(implicit as: ActorSystem, ec: ExecutionContext) extends CircuitBreakingServiceLocator(circuitBreakersPanel)(ec) {

  override def locate(name: String, serviceCall: Descriptor.Call[_, _]): Future[Option[JavaURI]] =
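    // prefer an endpoint advertised as "http"; otherwise fall back to any endpoint registered for the service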
    for {
      http <- ServiceLocator.lookupOne(name, "http")
      result <- http match {
        case None => ServiceLocator.lookupOne(name)
        case Some(r) => Future.successful(Some(r))
      }
    } yield result.map(_.uri)
} 
Example 125
Source File: MultiColumnEstimator.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables

import scala.language.reflectiveCalls
import scala.reflect.runtime.universe.TypeTag

import org.apache.spark.sql.types.StructType

import ai.deepsense.deeplang.ExecutionContext
import ai.deepsense.deeplang.doperables.dataframe.DataFrame
import ai.deepsense.deeplang.doperables.multicolumn.HasSpecificParams
import ai.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.MultiColumnInPlaceChoices.{MultiColumnNoInPlace, MultiColumnYesInPlace}
import ai.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.SingleOrMultiColumnChoices.{MultiColumnChoice, SingleColumnChoice}
import ai.deepsense.deeplang.doperables.multicolumn.SingleColumnParams.SingleTransformInPlaceChoices.{NoInPlaceChoice, YesInPlaceChoice}
import ai.deepsense.deeplang.doperables.spark.wrappers.params.common.HasInputColumn
import ai.deepsense.deeplang.params.IOColumnsParam
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection


  override private[deeplang] def _fit_infer(
      schema: Option[StructType]): T = {
    $(singleOrMultiChoiceParam) match {
      case single: SingleColumnChoice =>
        handleSingleColumnChoiceInfer(schema, single)
      case multi: MultiColumnChoice =>
        handleMultiColumnChoiceInfer(schema, multi)
    }
  }
} 
Example 126
Source File: DecisionTreeParams.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import ai.deepsense.deeplang.doperables.spark.wrappers.params.common._
import ai.deepsense.deeplang.params.Params

trait DecisionTreeParams
  extends Params
  with PredictorParams
  with HasCheckpointIntervalParam
  with HasSeedParam
  with HasMaxDepthParam
  with HasMaxBinsParam
  with HasMinInstancePerNodeParam
  with HasMinInfoGainParam
  with HasMaxMemoryInMBParam
  with HasCacheNodeIdsParam 
Example 127
Source File: GBTParams.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.doperables.spark.wrappers.params.common._
import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.{DoubleParamWrapper, IntParamWrapper}

trait GBTParams extends Params
  with PredictorParams
  with HasLabelColumnParam
  with HasMaxIterationsParam
  with HasSeedParam
  with HasStepSizeParam
  with HasMaxBinsParam
  with HasMaxDepthParam
  with HasMinInfoGainParam
  with HasMinInstancePerNodeParam
  with HasSubsamplingRateParam 
Example 128
Source File: AFTSurvivalRegressionParams.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.doperables.spark.wrappers.params.common._
import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.{ArrayLengthValidator, ComplexArrayValidator, RangeValidator}
import ai.deepsense.deeplang.params.wrappers.spark.DoubleArrayParamWrapper

trait AFTSurvivalRegressionParams extends Params
    with PredictorParams
    with HasOptionalQuantilesColumnParam {

  val quantileProbabilities =
    new DoubleArrayParamWrapper[
        ml.param.Params { val quantileProbabilities: ml.param.DoubleArrayParam }](
      name = "quantile probabilities",
      description = Some("""Param for quantile probabilities array.
                      |Values of the quantile probabilities array should be in the range (0, 1)
                      |and the array should be non-empty.""".stripMargin),
      sparkParamGetter = _.quantileProbabilities,
      validator = ComplexArrayValidator(
        rangeValidator = RangeValidator(0, 1, beginIncluded = false, endIncluded = false),
        lengthValidator = ArrayLengthValidator.withAtLeast(1)
      ))
  setDefault(quantileProbabilities, Array(0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99))
} 
Example 129
Source File: Word2VecParams.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.doperables.spark.wrappers.params.common._
import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

trait Word2VecParams extends Params
  with HasMaxIterationsParam
  with HasStepSizeParam
  with HasSeedParam {

  val vectorSize = new IntParamWrapper[ml.param.Params { val vectorSize: ml.param.IntParam }](
    name = "vector size",
    description = Some("The dimension of codes after transforming from words."),
    sparkParamGetter = _.vectorSize,
    validator = RangeValidator.positiveIntegers)
  setDefault(vectorSize -> 100)

  val numPartitions = new IntParamWrapper[ml.param.Params { val numPartitions: ml.param.IntParam }](
    name = "num partitions",
    description = Some("The number of partitions for sentences of words."),
    sparkParamGetter = _.numPartitions,
    validator = RangeValidator.positiveIntegers)
  setDefault(numPartitions -> 1)

  val minCount = new IntParamWrapper[ml.param.Params { val minCount: ml.param.IntParam }](
    name = "min count",
    description = Some("The minimum number of occurences of a token to " +
      "be included in the model's vocabulary."),
    sparkParamGetter = _.minCount,
    validator = RangeValidator.positiveIntegers)
  setDefault(minCount -> 5)

  def setMinCount(value: Int): this.type = {
    set(minCount -> value)
  }

  def setVectorSize(value: Int): this.type = {
    set(vectorSize -> value)
  }
} 
Example 130
Source File: HasItemColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasItemColumnParam extends Params {

  val itemColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val itemCol: ml.param.Param[String] }](
      name = "item column",
      description = Some("The column for item ids."),
      sparkParamGetter = _.itemCol,
      portIndex = 0)
  setDefault(itemColumn, NameSingleColumnSelection("item"))
} 
Example 131
Source File: MinMaxParams.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.wrappers.spark.DoubleParamWrapper

trait MinMaxParams extends Params {

  val min = new DoubleParamWrapper[ml.param.Params { val min: ml.param.DoubleParam }](
    name = "min",
    description = Some("The lower bound after transformation, shared by all features."),
    sparkParamGetter = _.min)
  setDefault(min, 0.0)

  val max = new DoubleParamWrapper[ml.param.Params { val max: ml.param.DoubleParam }](
    name = "max",
    description = Some("The upper bound after transformation, shared by all features."),
    sparkParamGetter = _.max)
  setDefault(max, 1.0)

  def setMin(value: Double): this.type = {
    set(min, value)
  }

  def setMax(value: Double): this.type = {
    set(max, value)
  }
} 
Example 132
Source File: HasCheckpointIntervalParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

trait HasCheckpointIntervalParam extends Params {

  val checkpointInterval = new IntParamWrapper[
      ml.param.Params { val checkpointInterval: ml.param.IntParam }](
    name = "checkpoint interval",
    description = Some("""The checkpoint interval. E.g. 10 means that the cache will get checkpointed
        |every 10 iterations.""".stripMargin),
    sparkParamGetter = _.checkpointInterval,
    validator = RangeValidator(begin = 1.0, end = Int.MaxValue, step = Some(1.0)))
  setDefault(checkpointInterval, 10.0)
} 
Example 133
Source File: HasOutputColumn.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnCreatorParamWrapper

trait HasOutputColumn extends Params {

  val outputColumn = new SingleColumnCreatorParamWrapper[
      ml.param.Params { val outputCol: ml.param.Param[String] }](
    name = "output column",
    description = Some("The output column name."),
    sparkParamGetter = _.outputCol)

  def setOutputColumn(value: String): this.type = {
    set(outputColumn, value)
  }
} 
Example 134
Source File: HasSolverParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.param.{Param => SparkParam}

import ai.deepsense.deeplang.params.choice.Choice
import ai.deepsense.deeplang.params.wrappers.spark.ChoiceParamWrapper
import ai.deepsense.deeplang.params.{Param, Params}

trait HasSolverParam extends Params {
  val solver =
    new ChoiceParamWrapper[
        ml.param.Params { val solver: SparkParam[String]}, SolverChoice.SolverOption](
      name = "solver",
      sparkParamGetter = _.solver,
      description =
        Some("""Sets the solver algorithm used for optimization.
          |Can be set to "l-bfgs", "normal" or "auto".
          |"l-bfgs" denotes Limited-memory BFGS which is a limited-memory quasi-Newton
          |optimization method. "normal" denotes Normal Equation. It is an analytical
          |solution to the linear regression problem.
          |The default value is "auto" which means that the solver algorithm is
          |selected automatically.""".stripMargin))

  setDefault(solver, SolverChoice.Auto())
}

object SolverChoice {

  sealed abstract class SolverOption(override val name: String) extends Choice {

    override val params: Array[Param[_]] = Array()

    override val choiceOrder: List[Class[_ <: SolverOption]] = List(
      classOf[Auto],
      classOf[Normal],
      classOf[LBFGS]
    )
  }

  case class Auto() extends SolverOption("auto")
  case class Normal() extends SolverOption("normal")
  case class LBFGS() extends SolverOption("l-bfgs")
} 
Example 135
Source File: HasMinTermsFrequencyParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.DoubleParamWrapper

trait HasMinTermsFrequencyParam
  extends HasInputColumn
  with HasOutputColumn {

  val minTF = new DoubleParamWrapper[ml.param.Params { val minTF: ml.param.DoubleParam }](
    name = "min term frequency",
    description =
      Some("""A filter to ignore rare words in a document. For each document, terms with
        |a frequency/count less than the given threshold are ignored. If this is an integer >= 1,
        |then this specifies a count (of times the term must appear in the document); if this is
        |a double in [0,1), then it specifies a fraction (out of the document's token count).
        |Note that the parameter is only used in transform of CountVectorizer model and does not
        |affect fitting.""".stripMargin),
    sparkParamGetter = _.minTF,
    RangeValidator(0.0, Double.MaxValue))
  setDefault(minTF, 1.0)

  def setMinTF(value: Double): this.type = {
    set(minTF, value)
  }
} 
Example 136
Source File: HasSubsamplingRateParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.{DoubleParamWrapper, IntParamWrapper}

trait HasSubsamplingRateParam extends Params {

  val subsamplingRate =
    new DoubleParamWrapper[ml.param.Params { val subsamplingRate: ml.param.DoubleParam }](
      name = "subsampling rate",
      description =
        Some("The fraction of the training data used for learning each decision tree."),
      sparkParamGetter = _.subsamplingRate,
      RangeValidator(0.0, 1.0, beginIncluded = false))
  setDefault(subsamplingRate, 1.0)

} 
Example 137
Source File: HasOptionalQuantilesColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.param.{Param => SparkParam}

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.choice.{Choice, ChoiceParam}
import ai.deepsense.deeplang.params.wrappers.spark.{ParamsWithSparkWrappers, SingleColumnCreatorParamWrapper}

trait HasOptionalQuantilesColumnParam extends Params {

  val optionalQuantilesColumn =
    new ChoiceParam[OptionalQuantilesColumnChoice.QuantilesColumnOption](
      name = "use custom quantiles",
      description =
        Some("""Param for quantiles column name.
          |This column will output quantiles of corresponding
          |quantileProbabilities if it is set.""".stripMargin))

  setDefault(optionalQuantilesColumn, OptionalQuantilesColumnChoice.QuantilesColumnNoOption())
}

object OptionalQuantilesColumnChoice {

  sealed trait QuantilesColumnOption extends Choice with ParamsWithSparkWrappers {
    override val choiceOrder: List[Class[_ <: QuantilesColumnOption]] = List(
      classOf[QuantilesColumnNoOption],
      classOf[QuantilesColumnYesOption])
  }

  case class QuantilesColumnYesOption() extends QuantilesColumnOption {
    val quantilesColumn = new SingleColumnCreatorParamWrapper[
        ml.param.Params { val quantilesCol: SparkParam[String]}](
      name = "quantiles column",
      description = Some("The quantiles column for a model."),
      sparkParamGetter = _.quantilesCol)
    setDefault(quantilesColumn, "quantiles")

    override val name = "yes"
    override val params: Array[ai.deepsense.deeplang.params.Param[_]] = Array(quantilesColumn)
  }

  case class QuantilesColumnNoOption() extends QuantilesColumnOption {
    override val name = "no"
    override val params: Array[ai.deepsense.deeplang.params.Param[_]] = Array()
  }
} 
Example 138
Source File: HasInputColumn.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasInputColumn extends Params {

  val inputColumn = new SingleColumnSelectorParamWrapper[
      ml.param.Params { val inputCol: ml.param.Param[String] }](
    name = "input column",
    description = Some("The input column name."),
    sparkParamGetter = _.inputCol,
    portIndex = 0)

  def setInputColumn(value: String): this.type = {
    set(inputColumn, NameSingleColumnSelection(value))
  }
} 
Example 139
Source File: HasMaxBinsParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.RandomForestRegressor

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.{IntParamWrapper, LongParamWrapper}

trait HasMaxBinsParam extends Params {

  val maxBins = new IntParamWrapper[ml.param.Params { val maxBins: ml.param.IntParam }](
    name = "max bins",
    description = Some("The maximum number of bins used for discretizing continuous features " +
      "and for choosing how to split on features at each node. " +
      "More bins give higher granularity. " +
      "Must be >= 2 and >= number of categories in any categorical feature."),
    sparkParamGetter = _.maxBins,
    RangeValidator(2.0, Int.MaxValue, step = Some(1.0)))
  setDefault(maxBins, 32.0)

} 
Example 140
Source File: HasUserColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasUserColumnParam extends Params {

  val userColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val userCol: ml.param.Param[String] }](
      name = "user column",
      description = Some("The column for user ids."),
      sparkParamGetter = _.userCol,
      portIndex = 0)
  setDefault(userColumn, NameSingleColumnSelection("user"))
} 
Example 141
Source File: HasFeaturesColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.{NameSingleColumnSelection, SingleColumnSelection}
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasFeaturesColumnParam extends Params {

  val featuresColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val featuresCol: ml.param.Param[String] }](
      name = "features column",
      description = Some("The features column for model fitting."),
      sparkParamGetter = _.featuresCol,
      portIndex = 0)
  setDefault(featuresColumn, NameSingleColumnSelection("features"))

  def setFeaturesColumn(value: SingleColumnSelection): this.type = set(featuresColumn, value)
} 
Example 142
Source File: HasLabelColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.{SingleColumnSelection, NameSingleColumnSelection}
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasLabelColumnParam extends Params {

  val labelColumn =
    new SingleColumnSelectorParamWrapper[
        ml.param.Params { val labelCol: ml.param.Param[String] }](
      name = "label column",
      description = Some("The label column for model fitting."),
      sparkParamGetter = _.labelCol,
      portIndex = 0)
  setDefault(labelColumn, NameSingleColumnSelection("label"))

  def setLabelColumn(value: SingleColumnSelection): this.type = set(labelColumn, value)
} 
Example 143
Source File: HasElasticNetParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.DoubleParamWrapper

trait HasElasticNetParam extends Params {

  val elasticNetParam = new DoubleParamWrapper[
      ml.param.Params { val elasticNetParam: ml.param.DoubleParam }](
    name = "elastic net param",
    description = Some("The ElasticNet mixing parameter. " +
      "For alpha = 0, the penalty is an L2 penalty. For alpha = 1, it is an L1 penalty."),
    sparkParamGetter = _.elasticNetParam,
    validator = RangeValidator(0.0, 1.0))
  setDefault(elasticNetParam, 0.0)
} 
Example 144
Source File: HasOptionalWeightColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.param.{Param => SparkParam}

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.choice.{Choice, ChoiceParam}
import ai.deepsense.deeplang.params.selections.{NameSingleColumnSelection, SingleColumnSelection}
import ai.deepsense.deeplang.params.wrappers.spark.{ParamsWithSparkWrappers, SingleColumnSelectorParamWrapper}

trait HasOptionalWeightColumnParam extends Params {

  val optionalWeightColumn =
    new ChoiceParam[OptionalWeightColumnChoice.WeightColumnOption](
      name = "use custom weights",
      description =
        Some("""Whether to over-/under-sample training instances according to the given weights in
          |the `weight column`. If the `weight column` is not specified,
          |all instances are treated equally with a weight 1.0.""".stripMargin))

  setDefault(optionalWeightColumn, OptionalWeightColumnChoice.WeightColumnNoOption())
}

object OptionalWeightColumnChoice {

  sealed trait WeightColumnOption
    extends Choice with ParamsWithSparkWrappers {
    override val choiceOrder: List[Class[_ <: WeightColumnOption]] = List(
      classOf[WeightColumnNoOption],
      classOf[WeightColumnYesOption])
  }

  case class WeightColumnYesOption() extends WeightColumnOption {
    val weightColumn = new SingleColumnSelectorParamWrapper[
      ml.param.Params { val weightCol: SparkParam[String]}](
      name = "weight column",
      description = Some("The weight column for a model."),
      sparkParamGetter = _.weightCol,
      portIndex = 0)
    setDefault(weightColumn, NameSingleColumnSelection("weight"))

    def getWeightColumn: SingleColumnSelection = $(weightColumn)
    def setWeightColumn(value: SingleColumnSelection): this.type = set(weightColumn -> value)

    override val name = "yes"
    override val params: Array[ai.deepsense.deeplang.params.Param[_]] = Array(weightColumn)
  }

  case class WeightColumnNoOption() extends WeightColumnOption {
    override val name = "no"
    override val params: Array[ai.deepsense.deeplang.params.Param[_]] = Array()
  }
} 
Example 145
Source File: HasMinInstancePerNodeParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.RandomForestRegressor

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.{IntParamWrapper, DoubleParamWrapper}

trait HasMinInstancePerNodeParam extends Params {

  val minInstancesPerNode =
    new IntParamWrapper[ml.param.Params { val minInstancesPerNode: ml.param.IntParam }](
      name = "min instances per node",
      description = Some("The minimum number of instances each child must have after split. " +
        "If a split causes the left or right child to have fewer instances than the parameter's " +
        "value, the split will be discarded as invalid."),
      sparkParamGetter = _.minInstancesPerNode,
      RangeValidator(1.0, Int.MaxValue, step = Some(1.0)))
  setDefault(minInstancesPerNode, 1.0)

} 
Example 146
Source File: HasRawPredictionColumnParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasRawPredictionColumnParam extends Params {

  val rawPredictionColumn =
    new SingleColumnSelectorParamWrapper[
        ml.param.Params { val rawPredictionCol: ml.param.Param[String] }](
      name = "raw prediction column",
      description = Some("The raw prediction (confidence) column."),
      sparkParamGetter = _.rawPredictionCol,
      portIndex = 0)
  setDefault(rawPredictionColumn, NameSingleColumnSelection("rawPrediction"))
} 
Example 147
Source File: HasPredictionColumnSelectorParam.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.params.common

import scala.language.reflectiveCalls

import org.apache.spark.ml

import ai.deepsense.deeplang.params.Params
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

trait HasPredictionColumnSelectorParam extends Params {

  val predictionColumn =
    new SingleColumnSelectorParamWrapper[
        ml.param.Params { val predictionCol: ml.param.Param[String] }](
      name = "prediction column",
      description = Some("The prediction column."),
      sparkParamGetter = _.predictionCol,
      portIndex = 0)
  setDefault(predictionColumn, NameSingleColumnSelection("prediction"))
} 
Example 148
Source File: QuantileDiscretizerModel.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.models

import scala.language.reflectiveCalls

import org.apache.spark.ml.feature.{Bucketizer => SparkQuantileDiscretizerModel, QuantileDiscretizer => SparkQuantileDiscretizer}

import ai.deepsense.deeplang.ExecutionContext
import ai.deepsense.deeplang.doperables.SparkSingleColumnModelWrapper
import ai.deepsense.deeplang.doperables.report.CommonTablesGenerators.SparkSummaryEntry
import ai.deepsense.deeplang.doperables.report.{CommonTablesGenerators, Report}
import ai.deepsense.deeplang.doperables.serialization.SerializableSparkModel
import ai.deepsense.deeplang.params.Param

class QuantileDiscretizerModel
  extends SparkSingleColumnModelWrapper[SparkQuantileDiscretizerModel, SparkQuantileDiscretizer] {

  
  override protected def getSpecificParams: Array[Param[_]] = Array()

  override def report(extended: Boolean = true): Report = {
    val summary =
      List(
        SparkSummaryEntry(
          name = "splits",
          value = sparkModel.getSplits,
          description = "Split points for mapping continuous features into buckets."))

    super.report(extended)
      .withAdditionalTable(CommonTablesGenerators.modelSummary(summary))
  }

  override protected def loadModel(
      ctx: ExecutionContext,
      path: String): SerializableSparkModel[SparkQuantileDiscretizerModel] = {
    new SerializableSparkModel(SparkQuantileDiscretizerModel.load(path))
  }

} 
Example 149
Source File: PCAModel.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.models

import scala.language.reflectiveCalls

import org.apache.spark.ml.feature.{PCA => SparkPCA, PCAModel => SparkPCAModel}

import ai.deepsense.deeplang.ExecutionContext
import ai.deepsense.deeplang.doperables.SparkSingleColumnModelWrapper
import ai.deepsense.deeplang.doperables.report.{CommonTablesGenerators, Report}
import ai.deepsense.deeplang.doperables.serialization.SerializableSparkModel
import ai.deepsense.deeplang.params.Param
import ai.deepsense.sparkutils.ML

class PCAModel
  extends SparkSingleColumnModelWrapper[SparkPCAModel, SparkPCA] {

  override protected def getSpecificParams: Array[Param[_]] = Array()

  override def report(extended: Boolean = true): Report = {
    super.report(extended)
      .withAdditionalTable(CommonTablesGenerators.denseMatrix(
        name = "A Principal Components Matrix",
        description = "Each column is one principal component.",
        matrix = ML.ModelParams.pcFromPCAModel(sparkModel)))
  }

  override protected def loadModel(
      ctx: ExecutionContext,
      path: String): SerializableSparkModel[SparkPCAModel] = {
    new SerializableSparkModel(SparkPCAModel.load(path))
  }
} 
Example 150
Source File: GBTRegression.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.{GBTRegressionModel => SparkGBTRegressionModel, GBTRegressor => SparkGBTRegressor}

import ai.deepsense.deeplang.doperables.SparkEstimatorWrapper
import ai.deepsense.deeplang.doperables.spark.wrappers.models.GBTRegressionModel
import ai.deepsense.deeplang.doperables.spark.wrappers.params.GBTParams
import ai.deepsense.deeplang.doperables.spark.wrappers.params.common.HasRegressionImpurityParam
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.choice.Choice
import ai.deepsense.deeplang.params.wrappers.spark.ChoiceParamWrapper

class GBTRegression
  extends SparkEstimatorWrapper[
    SparkGBTRegressionModel,
    SparkGBTRegressor,
    GBTRegressionModel]
  with GBTParams
  with HasRegressionImpurityParam {

  import GBTRegression._

  override lazy val maxIterationsDefault = 20.0

  val lossType = new ChoiceParamWrapper[
    ml.param.Params { val lossType: ml.param.Param[String] }, LossType](
    name = "loss function",
    description = Some("The loss function which GBT tries to minimize."),
    sparkParamGetter = _.lossType)
  setDefault(lossType, Squared())

  override val params: Array[Param[_]] = Array(
    impurity,
    lossType,
    maxBins,
    maxDepth,
    maxIterations,
    minInfoGain,
    minInstancesPerNode,
    seed,
    stepSize,
    subsamplingRate,
    labelColumn,
    featuresColumn,
    predictionColumn)
}

object GBTRegression {

  sealed abstract class LossType(override val name: String) extends Choice {
    override val params: Array[Param[_]] = Array()

    override val choiceOrder: List[Class[_ <: Choice]] = List(
      classOf[Squared],
      classOf[Absolute]
    )
  }
  case class Squared() extends LossType("squared")
  case class Absolute() extends LossType("absolute")

} 
Example 151
Source File: NaiveBayes.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.classification.{NaiveBayes => SparkNaiveBayes, NaiveBayesModel => SparkNaiveBayesModel}

import ai.deepsense.deeplang.doperables.SparkEstimatorWrapper
import ai.deepsense.deeplang.doperables.spark.wrappers.estimators.NaiveBayes.{ModelType, Multinomial}
import ai.deepsense.deeplang.doperables.spark.wrappers.models.NaiveBayesModel
import ai.deepsense.deeplang.doperables.spark.wrappers.params.common._
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.choice.Choice
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.{ChoiceParamWrapper, DoubleParamWrapper}

class NaiveBayes
  extends SparkEstimatorWrapper[
    SparkNaiveBayesModel,
    SparkNaiveBayes,
    NaiveBayesModel]
  with ProbabilisticClassifierParams
  with HasLabelColumnParam {

  val smoothing = new DoubleParamWrapper[ml.param.Params { val smoothing: ml.param.DoubleParam }](
    name = "smoothing",
    description = Some("The smoothing parameter."),
    sparkParamGetter = _.smoothing,
    validator = RangeValidator(begin = 0.0, end = Double.MaxValue))
  setDefault(smoothing, 1.0)

  val modelType =
    new ChoiceParamWrapper[ml.param.Params { val modelType: ml.param.Param[String] }, ModelType](
      name = "modelType",
      description = Some("The model type."),
      sparkParamGetter = _.modelType)
  setDefault(modelType, Multinomial())


  override val params: Array[Param[_]] = Array(
    smoothing,
    modelType,
    labelColumn,
    featuresColumn,
    probabilityColumn,
    rawPredictionColumn,
    predictionColumn)
}

object NaiveBayes {

  sealed abstract class ModelType(override val name: String) extends Choice {

    override val params: Array[Param[_]] = Array()

    override val choiceOrder: List[Class[_ <: Choice]] = List(
      classOf[Multinomial],
      classOf[Bernoulli]
    )
  }

  case class Multinomial() extends ModelType("multinomial")

  case class Bernoulli() extends ModelType("bernoulli")
} 
Example 152
Source File: StringIndexerEstimator.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml.feature.{StringIndexer => SparkStringIndexer, StringIndexerModel => SparkStringIndexerModel}

import ai.deepsense.deeplang.doperables.multicolumn.MultiColumnParams.SingleOrMultiColumnChoices.SingleColumnChoice
import ai.deepsense.deeplang.doperables.spark.wrappers.models.{MultiColumnStringIndexerModel, SingleColumnStringIndexerModel, StringIndexerModel}
import ai.deepsense.deeplang.doperables.{SparkMultiColumnEstimatorWrapper, SparkSingleColumnEstimatorWrapper}
import ai.deepsense.deeplang.params.Param

class StringIndexerEstimator
  extends SparkMultiColumnEstimatorWrapper[
    SparkStringIndexerModel,
    SparkStringIndexer,
    StringIndexerModel,
    SingleColumnStringIndexerModel,
    SingleStringIndexer,
    MultiColumnStringIndexerModel] {

  setDefault(singleOrMultiChoiceParam, SingleColumnChoice())

  override def getSpecificParams: Array[Param[_]] = Array()
}

class SingleStringIndexer
  extends SparkSingleColumnEstimatorWrapper[
    SparkStringIndexerModel,
    SparkStringIndexer,
    SingleColumnStringIndexerModel] {

  override def getSpecificParams: Array[Param[_]] = Array()
} 
Example 153
Source File: CountVectorizerEstimator.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.feature.{CountVectorizer => SparkCountVectorizer, CountVectorizerModel => SparkCountVectorizerModel}

import ai.deepsense.deeplang.doperables.SparkSingleColumnEstimatorWrapper
import ai.deepsense.deeplang.doperables.spark.wrappers.models.CountVectorizerModel
import ai.deepsense.deeplang.doperables.spark.wrappers.params.common._
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.{DoubleParamWrapper, IntParamWrapper}

class CountVectorizerEstimator
  extends SparkSingleColumnEstimatorWrapper[
    SparkCountVectorizerModel,
    SparkCountVectorizer,
    CountVectorizerModel]
  with HasMinTermsFrequencyParam {

  val minDF = new DoubleParamWrapper[ml.param.Params { val minDF: ml.param.DoubleParam }](
    name = "min different documents",
    description = Some("Specifies the minimum number of different documents " +
      "a term must appear in to be included in the vocabulary."),
    sparkParamGetter = _.minDF,
    RangeValidator(0.0, Double.MaxValue))
  setDefault(minDF, 1.0)

  val vocabSize = new IntParamWrapper[ml.param.Params { val vocabSize: ml.param.IntParam }](
    name = "max vocabulary size",
    description = Some("The maximum size of the vocabulary."),
    sparkParamGetter = _.vocabSize,
    RangeValidator(0.0, Int.MaxValue, beginIncluded = false, step = Some(1.0)))
  setDefault(vocabSize, (1 << 18).toDouble)

  override protected def getSpecificParams: Array[Param[_]] = Array(vocabSize, minDF, minTF)
} 
Example 154
Source File: ChiSqSelectorEstimator.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.feature.{ChiSqSelector => SparkChiSqSelector, ChiSqSelectorModel => SparkChiSqSelectorModel}

import ai.deepsense.deeplang.doperables.SparkEstimatorWrapper
import ai.deepsense.deeplang.doperables.spark.wrappers.models.ChiSqSelectorModel
import ai.deepsense.deeplang.doperables.spark.wrappers.params.common.{HasFeaturesColumnParam, HasLabelColumnParam, HasOutputColumn}
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

class ChiSqSelectorEstimator
  extends SparkEstimatorWrapper[
    SparkChiSqSelectorModel,
    SparkChiSqSelector,
    ChiSqSelectorModel]
  with HasFeaturesColumnParam
  with HasOutputColumn
  with HasLabelColumnParam {

  val numTopFeatures = new IntParamWrapper[
    ml.param.Params { val numTopFeatures: ml.param.IntParam }](
    name = "num top features",
    description = Some("Number of features that selector will select, ordered by statistics value " +
      "descending. If the real number of features is lower, then this will select all " +
      "features."),
    sparkParamGetter = _.numTopFeatures,
    validator = RangeValidator(begin = 1.0, end = Int.MaxValue, step = Some(1.0)))
  setDefault(numTopFeatures -> 50)

  override val params: Array[Param[_]] = Array(
    numTopFeatures,
    featuresColumn,
    outputColumn,
    labelColumn)

  def setNumTopFeatures(value: Int): this.type = set(numTopFeatures -> value)
} 
Example 155
Source File: QuantileDiscretizerEstimator.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.feature.{Bucketizer => SparkQuantileDiscretizerModel, QuantileDiscretizer => SparkQuantileDiscretizer}

import ai.deepsense.deeplang.doperables.SparkSingleColumnEstimatorWrapper
import ai.deepsense.deeplang.doperables.spark.wrappers.models.QuantileDiscretizerModel
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.validators.RangeValidator
import ai.deepsense.deeplang.params.wrappers.spark.IntParamWrapper

class QuantileDiscretizerEstimator
  extends SparkSingleColumnEstimatorWrapper[
    SparkQuantileDiscretizerModel,
    SparkQuantileDiscretizer,
    QuantileDiscretizerModel] {

  val numBuckets = new IntParamWrapper[ml.param.Params { val numBuckets: ml.param.IntParam }](
    name = "num buckets",
    description = Some("Maximum number of buckets (quantiles or categories) " +
      "into which the data points are grouped. Must be >= 2."),
    sparkParamGetter = _.numBuckets,
    RangeValidator(2.0, Int.MaxValue, step = Some(1.0)))
  setDefault(numBuckets, 2.0)

  override protected def getSpecificParams: Array[Param[_]] = Array(numBuckets)

  def setNumBuckets(value: Int): this.type = set(numBuckets -> value)
} 
Example 156
Source File: AFTSurvivalRegression.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.ml
import org.apache.spark.ml.regression.{AFTSurvivalRegression => SparkAFTSurvivalRegression, AFTSurvivalRegressionModel => SparkAFTSurvivalRegressionModel}

import ai.deepsense.deeplang.doperables.SparkEstimatorWrapper
import ai.deepsense.deeplang.doperables.spark.wrappers.models.AFTSurvivalRegressionModel
import ai.deepsense.deeplang.doperables.spark.wrappers.params.AFTSurvivalRegressionParams
import ai.deepsense.deeplang.doperables.spark.wrappers.params.common.{HasFitIntercept, HasLabelColumnParam, HasMaxIterationsParam, HasTolerance}
import ai.deepsense.deeplang.params.Param
import ai.deepsense.deeplang.params.selections.NameSingleColumnSelection
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnSelectorParamWrapper

class AFTSurvivalRegression
  extends SparkEstimatorWrapper[
    SparkAFTSurvivalRegressionModel,
    SparkAFTSurvivalRegression,
    AFTSurvivalRegressionModel]
  with AFTSurvivalRegressionParams
  with HasLabelColumnParam
  with HasMaxIterationsParam
  with HasTolerance
  with HasFitIntercept {

  val censorColumn =
    new SingleColumnSelectorParamWrapper[
      ml.param.Params { val censorCol: ml.param.Param[String] }](
      name = "censor column",
      description = Some("""Param for censor column name.
                      |The value of this column could be 0 or 1.
                      |If the value is 1, it means the event has occurred i.e. uncensored;
                      |otherwise censored.""".stripMargin),
      sparkParamGetter = _.censorCol,
      portIndex = 0)
  setDefault(censorColumn, NameSingleColumnSelection("censor"))

  override val params: Array[Param[_]] = Array(
    fitIntercept,
    maxIterations,
    tolerance,
    labelColumn,
    censorColumn,
    featuresColumn,
    predictionColumn,
    quantileProbabilities,
    optionalQuantilesColumn)
} 
Example 157
Source File: EstimatorModelWrapperFixtures.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.deeplang.doperables.spark.wrappers.estimators

import scala.language.reflectiveCalls

import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.ml
import org.apache.spark.ml.param.{ParamMap, Param => SparkParam}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

import ai.deepsense.deeplang.ExecutionContext
import ai.deepsense.deeplang.doperables.report.Report
import ai.deepsense.deeplang.doperables.serialization.SerializableSparkModel
import ai.deepsense.deeplang.doperables.{SparkEstimatorWrapper, SparkModelWrapper}
import ai.deepsense.deeplang.params.wrappers.spark.SingleColumnCreatorParamWrapper
import ai.deepsense.deeplang.params.{Param, Params}
import ai.deepsense.sparkutils.ML

object EstimatorModelWrapperFixtures {

  class SimpleSparkModel private[EstimatorModelWrapperFixtures]()
    extends ML.Model[SimpleSparkModel] {

    def this(x: String) = this()

    override val uid: String = "modelId"

    val predictionCol = new SparkParam[String](uid, "name", "description")

    def setPredictionCol(value: String): this.type = set(predictionCol, value)

    override def copy(extra: ParamMap): this.type = defaultCopy(extra)

    override def transformDF(dataset: DataFrame): DataFrame = {
      dataset.selectExpr("*", "1 as " + $(predictionCol))
    }

    @DeveloperApi
    override def transformSchema(schema: StructType): StructType = ???
  }

  class SimpleSparkEstimator extends ML.Estimator[SimpleSparkModel] {

    def this(x: String) = this()

    override val uid: String = "estimatorId"

    val predictionCol = new SparkParam[String](uid, "name", "description")

    override def fitDF(dataset: DataFrame): SimpleSparkModel =
      new SimpleSparkModel().setPredictionCol($(predictionCol))

    override def copy(extra: ParamMap): ML.Estimator[SimpleSparkModel] = defaultCopy(extra)

    @DeveloperApi
    override def transformSchema(schema: StructType): StructType = {
      schema.add(StructField($(predictionCol), IntegerType, nullable = false))
    }
  }

  trait HasPredictionColumn extends Params {
    val predictionColumn = new SingleColumnCreatorParamWrapper[
        ml.param.Params { val predictionCol: SparkParam[String] }](
      "prediction column",
      None,
      _.predictionCol)
    setDefault(predictionColumn, "abcdefg")

    def getPredictionColumn(): String = $(predictionColumn)
    def setPredictionColumn(value: String): this.type = set(predictionColumn, value)
  }

  class SimpleSparkModelWrapper
    extends SparkModelWrapper[SimpleSparkModel, SimpleSparkEstimator]
    with HasPredictionColumn {

    override val params: Array[Param[_]] = Array(predictionColumn)
    override def report(extended: Boolean = true): Report = ???

    override protected def loadModel(
      ctx: ExecutionContext,
      path: String): SerializableSparkModel[SimpleSparkModel] = ???
  }

  class SimpleSparkEstimatorWrapper
    extends SparkEstimatorWrapper[SimpleSparkModel, SimpleSparkEstimator, SimpleSparkModelWrapper]
    with HasPredictionColumn {

    override val params: Array[Param[_]] = Array(predictionColumn)
    override def report(extended: Boolean = true): Report = ???
  }
} 
Example 158
Source File: GenericDBIOs.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
package ai.deepsense.commons.service.db.dbio

import java.util.UUID

import scala.language.reflectiveCalls

import slick.dbio.Effect.{Read, Write}
import slick.driver.JdbcProfile

import ai.deepsense.commons.service.api.CommonApiExceptions

abstract class GenericDBIOs[Api, Db <: {def id : UUID}] {

  import scala.concurrent.ExecutionContext.Implicits.global

  import GenericDBIOs._

  val api: JdbcProfile#API
  val table: api.TableQuery[_ <: api.Table[Db] {
    def id: api.Rep[UUID]
  }]

  import api._

  val fromDB: Db => Api
  val fromApi: Api => Db

  def get(id: UUID): api.DBIOAction[Api, NoStream, Read] = for {
    entityOpt <- table.filter(_.id === id).result.headOption
    entity <- checkExists(id, entityOpt)
  } yield fromDB(entity)

  def getAll: DBIOAction[List[Api], NoStream, Read] = for {
    entities <- table.result
  } yield entities.map(fromDB).toList

  def insertOrUpdate(id: UUID, apiEntity: Api): DBIOAction[Api, NoStream, Write with Read] = {
    val entity = fromApi(apiEntity)
    for {
      _ <- pathParamsMustMatchBodyParams(id, entity)
      insertedCount <- table.insertOrUpdate(entity)
      justInserted <- table.filter(_.id === id).result.head
    } yield fromDB(justInserted)
  }

  def delete(id: UUID): DBIOAction[Unit, NoStream, Write] = for {
    _ <- table.filter(_.id === id).delete
  } yield ()

  private def pathParamsMustMatchBodyParams(id: UUID, entity: Db) = {
    if (entity.id == id) {
      DBIO.successful(())
    } else {
      DBIO.failed(CommonApiExceptions.pathIdMustMatchBodyId(id, entity.id))
    }
  }

}

object GenericDBIOs {

  import slick.dbio._

  def checkExists[T](id: UUID, option: Option[T]): DBIOAction[T, NoStream, Effect] = option match {
    case Some(value) => DBIO.successful(value)
    case None => DBIO.failed(CommonApiExceptions.doesNotExist(id))
  }

} 
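GenericDBIOs uses structural bounds in two places: the row type Db <: { def id: UUID } and the Slick table refinement { def id: api.Rep[UUID] }, so both entity.id and the _.id filters are reflective calls. A minimal sketch of a structural bound over unrelated row classes (hypothetical types, standard library only):

import java.util.UUID
import scala.language.reflectiveCalls

object StructuralBoundSketch extends App {
  // Generic code can read `id` from any row class, without requiring a shared trait.
  def describe[Db <: { def id: UUID }](entity: Db): String = s"row ${entity.id}"

  final case class WorkflowRow(id: UUID, name: String)
  final case class PresetRow(id: UUID, clusterName: String)

  println(describe(WorkflowRow(UUID.randomUUID(), "my-workflow")))
  println(describe(PresetRow(UUID.randomUUID(), "local-cluster")))
}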
Example 159
Source File: Main.scala    From scala-debugger   with Apache License 2.0 5 votes vote down vote up
package org.scaladebugger.test

import scala.language.reflectiveCalls

object Main extends App {
  val x = 3
  var y = 4
  val x123 = "huh?"

  def runMe(x: Int = 3) = println(x)

  val myClass = new MyClass((x) => (z) => x + z) {
    def anotherMethod = {
      val something = 1
      something + "asdf"
    }
  }

  while (true) {
    val z = x + y

    myClass.process(3)
    myClass.anotherMethod

    val func = (x: Int, y: Int) => {
      println(s"Adding $x + $y")
      x + y
    }

    println("Running " + runMe())
    Thread.sleep(1000)
    println("Past sleep!")

    println(z)
  }

  y = 5
  runMe()
}

class MyClass(www: Int => Int => Int) {
  def process(x: Int) = www(x)(x)
} 
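In this debugger test target, myClass is inferred to have the refinement type MyClass { def anotherMethod: String }, so the call myClass.anotherMethod goes through runtime reflection, which is why the file enables reflectiveCalls. A minimal sketch of the same pattern with hypothetical names:

import scala.language.reflectiveCalls

object RefinementCallSketch extends App {
  class Processor(f: Int => Int) {
    def process(x: Int): Int = f(x)
  }

  // Inferred type: Processor { def describe: String } -- a refinement of Processor.
  val p = new Processor(_ + 1) {
    def describe: String = "process(2) = " + process(2)
  }

  println(p.process(3)) // ordinary virtual call: 4
  println(p.describe)   // reflective call through the refinement: "process(2) = 3"
}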
Example 160
Source File: DiskBlockManagerSuite.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils
// DiskBlockManager manages and maintains the mapping between logical Blocks and the physical Blocks stored on disk.
// In general, a logical Block is mapped to a physical file whose name is derived from its BlockId.
class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  // DiskBlockManager creates and maintains the logical mapping between logical blocks and physical disk locations;
  // by default, a block is mapped to a single file whose name is given by its BlockId.
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {//基本块的创建
    val blockId = new TestBlockId("test")
    // DiskBlockManager creates and maintains the logical mapping between logical blocks and physical disk locations;
    // by default, a block is mapped to a single file whose name is given by its BlockId.
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {//枚举块
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 161
Source File: Master.scala    From mist   with Apache License 2.0 5 votes vote down vote up
package io.hydrosphere.mist.master

import io.hydrosphere.mist.utils.Logger

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.language.reflectiveCalls


object Master extends App with Logger {

  try {
    logger.info("Starting mist...")
    val appArguments = MasterAppArguments.parse(args) match {
      case Some(arg) => arg
      case None => sys.exit(1)
    }
    val config: MasterConfig = MasterConfig.load(appArguments.configPath)
    val starting = MasterServer.start(config, appArguments.routerConfigPath)
    val master = Await.result(starting, Duration.Inf)
    logger.info("Mist master started")


    sys addShutdownHook {
      logger.info("Received shutdown - start application termination")
      Await.result(master.stop(), Duration.Inf)
      logger.info("Mist master stopped")
    }
  } catch {
    case e: Throwable =>
      logger.error(s"Unexpected error: ${e.getMessage}", e)
      sys.exit(1)
  }


} 
Example 162
Source File: Replaceable.scala    From kubernetes-client   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kubernetes.client.operation

import scala.language.reflectiveCalls
import cats.effect.Sync
import com.goyeau.kubernetes.client.KubeConfig
import com.goyeau.kubernetes.client.util.CirceEntityCodec._
import com.goyeau.kubernetes.client.util.EnrichedStatus
import io.circe._
import io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta
import org.http4s._
import org.http4s.client.Client
import org.http4s.client.dsl.Http4sClientDsl
import org.http4s.Method._

private[client] trait Replaceable[F[_], Resource <: { def metadata: Option[ObjectMeta] }] extends Http4sClientDsl[F] {
  protected def httpClient: Client[F]
  implicit protected val F: Sync[F]
  protected def config: KubeConfig
  protected def resourceUri: Uri
  implicit protected def resourceEncoder: Encoder[Resource]

  def replace(resource: Resource): F[Status] =
    httpClient.fetch(
      PUT(
        resource,
        config.server.resolve(resourceUri) / resource.metadata.get.name.get,
        config.authorization.toSeq: _*
      )
    )(EnrichedStatus[F])
} 
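Replaceable, like the other operation traits in this client, constrains the resource type only structurally: Resource <: { def metadata: Option[ObjectMeta] }, so resource.metadata is a reflective call rather than a method on a shared base trait. A simplified, self-contained sketch of that bound, with hypothetical types standing in for the generated Kubernetes model classes:

import scala.language.reflectiveCalls

object MetadataBoundSketch extends App {
  final case class Metadata(name: Option[String], namespace: Option[String])

  // Stand-ins for generated resource classes that share no common supertype.
  final case class ConfigMapLike(metadata: Option[Metadata], data: Map[String, String])
  final case class SecretLike(metadata: Option[Metadata], stringData: Map[String, String])

  def nameOf[Resource <: { def metadata: Option[Metadata] }](resource: Resource): String =
    resource.metadata.flatMap(_.name).getOrElse("<unnamed>") // reflective call

  println(nameOf(ConfigMapLike(Some(Metadata(Some("app-config"), Some("default"))), Map.empty)))
  println(nameOf(SecretLike(None, Map.empty)))
}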
Example 163
Source File: Creatable.scala    From kubernetes-client   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kubernetes.client.operation

import scala.language.reflectiveCalls
import cats.implicits._
import cats.effect.Sync
import com.goyeau.kubernetes.client.KubeConfig
import com.goyeau.kubernetes.client.util.CirceEntityCodec._
import com.goyeau.kubernetes.client.util.EnrichedStatus
import io.circe._
import io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta
import org.http4s._
import org.http4s.client.Client
import org.http4s.client.dsl.Http4sClientDsl
import org.http4s.headers.`Content-Type`
import org.http4s.Method._

private[client] trait Creatable[F[_], Resource <: { def metadata: Option[ObjectMeta] }] extends Http4sClientDsl[F] {
  protected def httpClient: Client[F]
  implicit protected val F: Sync[F]
  protected def config: KubeConfig
  protected def resourceUri: Uri
  implicit protected def resourceEncoder: Encoder[Resource]

  def create(resource: Resource): F[Status] =
    httpClient.fetch(POST(resource, config.server.resolve(resourceUri), config.authorization.toSeq: _*))(
      EnrichedStatus[F]
    )

  def createOrUpdate(resource: Resource): F[Status] = {
    val fullResourceUri = config.server.resolve(resourceUri) / resource.metadata.get.name.get
    def update =
      httpClient.fetch(
        PATCH(
          resource,
          fullResourceUri,
          `Content-Type`(MediaType.application.`merge-patch+json`) +: config.authorization.toSeq: _*
        )
      )(EnrichedStatus[F])

    httpClient
      .fetch(GET(fullResourceUri, config.authorization.toSeq: _*))(EnrichedStatus.apply[F])
      .flatMap {
        case status if status.isSuccess => update
        case Status.NotFound =>
          create(resource).recoverWith {
            case Status.Conflict => update
          }
      }
  }
} 
Example 164
Source File: GettableTests.scala    From kubernetes-client   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kubernetes.client.operation

import cats.Applicative
import cats.implicits._
import com.goyeau.kubernetes.client.KubernetesClient
import io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta
import org.http4s.client.UnexpectedStatus
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.OptionValues
import org.scalatest.matchers.should.Matchers

import scala.language.reflectiveCalls

trait GettableTests[F[_], Resource <: { def metadata: Option[ObjectMeta] }]
    extends AnyFlatSpec
    with Matchers
    with OptionValues
    with MinikubeClientProvider[F] {

  def namespacedApi(namespaceName: String)(implicit client: KubernetesClient[F]): Gettable[F, Resource]
  def createChecked(namespaceName: String, resourceName: String)(implicit client: KubernetesClient[F]): F[Resource]

  def getChecked(namespaceName: String, resourceName: String)(implicit client: KubernetesClient[F]): F[Resource] =
    for {
      resource <- namespacedApi(namespaceName).get(resourceName)
      _ = resource.metadata.value.namespace.value shouldBe namespaceName
      _ = resource.metadata.value.name.value shouldBe resourceName
    } yield resource

  "get" should s"get a $resourceName" in usingMinikube { implicit client =>
    for {
      namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
      resourceName  <- Applicative[F].pure("some-resource-get")
      _             <- createChecked(namespaceName, resourceName)
      _             <- getChecked(namespaceName, resourceName)
    } yield ()
  }

  it should "fail on non existing namespace" in intercept[UnexpectedStatus] {
    usingMinikube(implicit client => getChecked("non-existing", "non-existing"))
  }

  it should s"fail on non existing $resourceName" in intercept[UnexpectedStatus] {
    usingMinikube { implicit client =>
      for {
        namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
        _             <- getChecked(namespaceName, "non-existing")
      } yield ()
    }
  }
} 
Example 165
Source File: ListableTests.scala    From kubernetes-client   with Apache License 2.0 5 votes vote down vote up
package com.goyeau.kubernetes.client.operation

import cats.Applicative
import cats.implicits._
import com.goyeau.kubernetes.client.KubernetesClient
import com.goyeau.kubernetes.client.api.NamespacesApiTest
import io.k8s.apimachinery.pkg.apis.meta.v1.ObjectMeta
import org.scalatest.OptionValues
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers

import scala.language.reflectiveCalls

trait ListableTests[F[_], Resource <: { def metadata: Option[ObjectMeta] }, ResourceList <: { def items: Seq[Resource] }]
    extends AnyFlatSpec
    with Matchers
    with OptionValues
    with MinikubeClientProvider[F] {

  val resourceIsNamespaced = true

  def api(implicit client: KubernetesClient[F]): Listable[F, ResourceList]
  def namespacedApi(namespaceName: String)(implicit client: KubernetesClient[F]): Listable[F, ResourceList]
  def createChecked(namespaceName: String, resourceName: String, labels: Map[String, String] = Map.empty)(
      implicit client: KubernetesClient[F]
  ): F[Resource]

  def listContains(namespaceName: String, resourceNames: Seq[String], labels: Map[String, String] = Map.empty)(
      implicit client: KubernetesClient[F]
  ): F[ResourceList] =
    for {
      resourceList <- namespacedApi(namespaceName).list(labels)
      _ = (resourceList.items.map(_.metadata.value.name.value) should contain).allElementsOf(resourceNames)
    } yield resourceList

  def listAllContains(resourceNames: Seq[String])(
      implicit client: KubernetesClient[F]
  ): F[ResourceList] =
    for {
      resourceList <- api.list()
      _ = (resourceList.items.map(_.metadata.value.name.value) should contain).allElementsOf(resourceNames)
    } yield resourceList

  def listNotContains(namespaceName: String, resourceNames: Seq[String], labels: Map[String, String] = Map.empty)(
      implicit client: KubernetesClient[F]
  ): F[ResourceList] =
    for {
      resourceList <- namespacedApi(namespaceName).list(labels)
      _ = (resourceList.items.map(_.metadata.value.name.value) should contain).noElementsOf(resourceNames)
    } yield resourceList

  "list" should s"list ${resourceName}s" in usingMinikube { implicit client =>
    for {
      namespaceName <- Applicative[F].pure(resourceName.toLowerCase)
      resourceName  <- Applicative[F].pure("list-resource")
      _             <- listNotContains(namespaceName, Seq(resourceName))
      _             <- createChecked(namespaceName, resourceName)
      _             <- listContains(namespaceName, Seq(resourceName))
    } yield ()
  }

  "list" should s"list ${resourceName}s with a label" in usingMinikube { implicit client =>
    for {
      namespaceName         <- Applicative[F].pure(resourceName.toLowerCase)
      noLabelResourceName   <- Applicative[F].pure("no-label-resource")
      _                     <- createChecked(namespaceName, noLabelResourceName)
      withLabelResourceName <- Applicative[F].pure("label-resource")
      labels = Map("test" -> "1")
      _ <- createChecked(namespaceName, withLabelResourceName, labels)
      _ <- listNotContains(namespaceName, Seq(noLabelResourceName), labels)
      _ <- listContains(namespaceName, Seq(withLabelResourceName), labels)
    } yield ()
  }

  it should s"list ${resourceName}s in all namespaces" in usingMinikube { implicit client =>
    assume(resourceIsNamespaced)
    for {
      namespaceResourceNames <- Applicative[F].pure(
        (0 to 1).map(i => (s"${resourceName.toLowerCase}-$i", s"list-all-${resourceName.toLowerCase}-$i"))
      )
      _ <- namespaceResourceNames.toList.traverse {
        case (namespaceName, resourceName) =>
          NamespacesApiTest.createChecked[F](namespaceName) *> createChecked(namespaceName, resourceName)
      }
      _ <- listAllContains(namespaceResourceNames.map(_._2))
      _ <- namespaceResourceNames.toList.traverse {
        case (namespaceName, _) => client.namespaces.delete(namespaceName)
      }
    } yield ()
  }
} 
Example 166
Source File: YarnSchedulerBackendSuite.scala    From Spark-2.3.1   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.scheduler.cluster

import scala.language.reflectiveCalls

import org.mockito.Mockito.when
import org.scalatest.mockito.MockitoSugar

import org.apache.spark.{LocalSparkContext, SparkContext, SparkFunSuite}
import org.apache.spark.scheduler.TaskSchedulerImpl
import org.apache.spark.serializer.JavaSerializer

class YarnSchedulerBackendSuite extends SparkFunSuite with MockitoSugar with LocalSparkContext {

  test("RequestExecutors reflects node blacklist and is serializable") {
    sc = new SparkContext("local", "YarnSchedulerBackendSuite")
    val sched = mock[TaskSchedulerImpl]
    when(sched.sc).thenReturn(sc)
    val yarnSchedulerBackend = new YarnSchedulerBackend(sched, sc) {
      def setHostToLocalTaskCount(hostToLocalTaskCount: Map[String, Int]): Unit = {
        this.hostToLocalTaskCount = hostToLocalTaskCount
      }
    }
    val ser = new JavaSerializer(sc.conf).newInstance()
    for {
      blacklist <- IndexedSeq(Set[String](), Set("a", "b", "c"))
      numRequested <- 0 until 10
      hostToLocalCount <- IndexedSeq(
        Map[String, Int](),
        Map("a" -> 1, "b" -> 2)
      )
    } {
      yarnSchedulerBackend.setHostToLocalTaskCount(hostToLocalCount)
      when(sched.nodeBlacklist()).thenReturn(blacklist)
      val req = yarnSchedulerBackend.prepareRequestExecutors(numRequested)
      assert(req.requestedTotal === numRequested)
      assert(req.nodeBlacklist === blacklist)
      assert(req.hostToLocalTaskCount.keySet.intersect(blacklist).isEmpty)
      // Serialize to make sure serialization doesn't throw an error
      ser.serialize(req)
    }
    sc.stop()
  }

} 
Example 167
Source File: CsvDf.scala    From blog   with Apache License 2.0 5 votes vote down vote up
object CsvDf {

  def main(args: Array[String]): Unit = {

    import org.saddle.Index
    import org.saddle.io._

    val file = CsvFile("../r/cars93.csv")
    val df = CsvParser.parse(file).withColIndex(0)
    println(df)
    val df2 = df.rfilter(_("EngineSize").mapValues(CsvParser.parseDouble).at(0)<=4.0)
    println(df2)
    val wkg=df2.col("Weight").mapValues(CsvParser.parseDouble).mapValues(_*0.453592).setColIndex(Index("WeightKG"))
    val df3=df2.joinPreserveColIx(wkg.mapValues(_.toString))
    println(df3)

    import CsvImplicits._
    import scala.language.reflectiveCalls
    df3.writeCsvFile("saddle-out.csv")

  }

} 
Example 168
Source File: XORShiftRandomSuite.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.util.random

import org.scalatest.Matchers

import org.apache.commons.math3.stat.inference.ChiSquareTest

import org.apache.spark.SparkFunSuite
import org.apache.spark.util.Utils.times

import scala.language.reflectiveCalls

class XORShiftRandomSuite extends SparkFunSuite with Matchers {

  private def fixture = new {
    val seed = 1L
    val xorRand = new XORShiftRandom(seed)
    val hundMil = 1e8.toInt
  }

  
  test("XORShift generates statistically uniform values") {
    // Note: the setup that builds `bins` (a matrix of per-bucket hit counts drawn from
    // fixture.xorRand via times(fixture.hundMil)) was truncated in this listing; only the
    // final assertion remains. The chi-square test should not reject uniformity at the 5% level.
    val chiTest = new ChiSquareTest
    assert(chiTest.chiSquareTest(bins, 0.05) === false)
  }

  test ("XORShift with zero seed") {
    val random = new XORShiftRandom(0L)
    assert(random.nextInt() != 0)
  }

  test ("hashSeed has random bits throughout") {
    val totalBitCount = (0 until 10).map { seed =>
      val hashed = XORShiftRandom.hashSeed(seed)
      val bitCount = java.lang.Long.bitCount(hashed)
      // make sure we have roughly equal numbers of 0s and 1s.  Mostly just check that we
      // don't have all 0s or 1s in the high bits
      bitCount should be > 20
      bitCount should be < 44
      bitCount
    }.sum
    // and over all the seeds, very close to equal numbers of 0s & 1s
    totalBitCount should be > (32 * 10 - 30)
    totalBitCount should be < (32 * 10 + 30)
  }
} 
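The fixture helper above uses new { ... }, whose type is an anonymous structural refinement, so reads such as f.xorRand and f.hundMil are reflective calls, and a fresh fixture instance is created on every invocation. A minimal sketch of the pattern (standard library only):

import scala.language.reflectiveCalls

object AnonymousFixtureSketch extends App {
  // Each call returns a new instance of an anonymous class with a structural type.
  private def fixture = new {
    val seed = 1L
    val rng = new scala.util.Random(seed)
  }

  val f = fixture
  println(f.seed)          // reflective call on the structural type
  println(f.rng.nextInt()) // deterministic across runs, since every fixture starts from the same seed
}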
Example 169
Source File: DiskBlockManagerSuite.scala    From BigDatalog   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.storage

import java.io.{File, FileWriter}

import scala.language.reflectiveCalls

import org.mockito.Mockito.{mock, when}
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}

import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.util.Utils

class DiskBlockManagerSuite extends SparkFunSuite with BeforeAndAfterEach with BeforeAndAfterAll {
  private val testConf = new SparkConf(false)
  private var rootDir0: File = _
  private var rootDir1: File = _
  private var rootDirs: String = _

  val blockManager = mock(classOf[BlockManager])
  when(blockManager.conf).thenReturn(testConf)
  var diskBlockManager: DiskBlockManager = _

  override def beforeAll() {
    super.beforeAll()
    rootDir0 = Utils.createTempDir()
    rootDir1 = Utils.createTempDir()
    rootDirs = rootDir0.getAbsolutePath + "," + rootDir1.getAbsolutePath
  }

  override def afterAll() {
    super.afterAll()
    Utils.deleteRecursively(rootDir0)
    Utils.deleteRecursively(rootDir1)
  }

  override def beforeEach() {
    val conf = testConf.clone
    conf.set("spark.local.dir", rootDirs)
    diskBlockManager = new DiskBlockManager(blockManager, conf)
  }

  override def afterEach() {
    diskBlockManager.stop()
  }

  test("basic block creation") {
    val blockId = new TestBlockId("test")
    val newFile = diskBlockManager.getFile(blockId)
    writeToFile(newFile, 10)
    assert(diskBlockManager.containsBlock(blockId))
    newFile.delete()
    assert(!diskBlockManager.containsBlock(blockId))
  }

  test("enumerating blocks") {
    val ids = (1 to 100).map(i => TestBlockId("test_" + i))
    val files = ids.map(id => diskBlockManager.getFile(id))
    files.foreach(file => writeToFile(file, 10))
    assert(diskBlockManager.getAllBlocks.toSet === ids.toSet)
  }

  def writeToFile(file: File, numBytes: Int) {
    val writer = new FileWriter(file, true)
    for (i <- 0 until numBytes) writer.write(i)
    writer.close()
  }
} 
Example 170
Source File: SigmaDslStaginTests.scala    From sigmastate-interpreter   with MIT License 5 votes vote down vote up
package special.sigma

import special.wrappers.WrappersTests
import special.collection._
import scala.language.reflectiveCalls
import scalan.{SigmaLibrary, BaseCtxTests, BaseLiftableTests}
import sigmastate.eval.Extensions._
import sigmastate.eval.{IRContext, ErgoScriptTestkit}
import sigmastate.helpers.SigmaTestingCommons

class SigmaDslStaginTests extends BaseCtxTests with ErgoScriptTestkit with BaseLiftableTests {
  class Ctx extends TestContext with IRContext with LiftableTestKit {
  }

  test("invokeUnlifted") {
    val cake = new Ctx
    import cake._
    import Liftables._
    import Context._
    import Coll._
    import Box._
    import SigmaProp._
    import SigmaDslBuilder._
    import EnvRep._

    val dsl: SSigmaDslBuilder = sigmastate.eval.SigmaDsl
    type RSigmaDslBuilder = cake.SigmaDslBuilder
    type RContext = cake.Context
    type RBox = cake.Box
    type RSigmaProp = cake.SigmaProp
    val boxA1 = newAliceBox(1, 100)
    val boxA2 = newAliceBox(2, 200)
    val ctx: SContext = newContext(10, boxA1)
      .withInputs(boxA2)
      .withVariables(Map(1 -> toAnyValue(30), 2 -> toAnyValue(40)))
    val p1: SSigmaProp = new special.sigma.MockSigma(true)
    val p2: SSigmaProp = new special.sigma.MockSigma(false)

    cake.check(dsl,  { env: EnvRep[RSigmaDslBuilder] =>
      for { dsl <- env; arg <- lifted(true) } yield dsl.sigmaProp(arg) }, dsl.sigmaProp(true))

    cake.check(ctx, { env: EnvRep[RContext] => for { obj <- env } yield obj.SELF }, ctx.SELF)
    cake.check(ctx, { env: EnvRep[RContext] =>
      for { obj <- env; id <- lifted(1.toByte) } yield obj.getVar[Int](id) }, ctx.getVar[Int](1))

    cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.value }, boxA1.value)
    cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.creationInfo }, boxA1.creationInfo)
    cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env; arg <- lifted(1) } yield obj.getReg[Coll[Byte]](arg) }, boxA1.getReg[special.collection.Coll[Byte]](1))
    cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.registers }, boxA1.registers)

    cake.check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; arg <- lifted(true) } yield p1 && arg }, p1 && true)
    cake.check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; arg <- lifted(p2) } yield p1 && arg }, p1 && p2)
  }
} 
Example 171
Source File: CostedTests.scala    From sigmastate-interpreter   with MIT License 5 votes vote down vote up
package special.collections

import scala.collection.mutable
import scala.language.reflectiveCalls
import scalan.util.BenchmarkUtil._
import special.SpecialPredef

class CostedTests extends BaseCostedTests {

  class ThisCtx extends Ctx  {
  }
  lazy val ctx = new ThisCtx
  import ctx._
  import CSizePrim._
  import CSizePair._
  import CSizeColl._
  import CSizeOption._
  import Costed._
  import CCostedPair._
  import CCostedPrim._
  import CCostedColl._
  import CCostedOption._
  import CollBuilder._
  import CostedBuilder._
  import Coll._
  import WOption._
  import WSpecialPredef._
  import Liftables._

  def buildGraph[T](nIters: Int, name: String)(action: Int => Ref[T]) = {
    val buf = mutable.ArrayBuilder.make[Ref[T]]()
    measure(nIters) { i =>
      buf += action(i)
    }
    ctx.emit(name, buf.result(): _*)
  }

  lazy val l = toRep(10)
  lazy val r = toRep(10.toByte)
  lazy val lC = RCCostedPrim(l, 1, RCSizePrim(4L, element[Int]))
  lazy val rC = RCCostedPrim(r, 1, RCSizePrim(1L, element[Byte]))
  lazy val pC = RCCostedPair(lC, rC, 1)
  lazy val ppC = RCCostedPair(pC, pC, 1)

  ignore("dataSize of CostedPair") {
    val sizeD= pC.size
    val expected = RCSizePair(RCSizePrim(4L, element[Int]), RCSizePrim(1L, element[Byte]))
    sizeD shouldBe expected
  }

  ignore("dataSize of nested CostedPair") {
    val sizeD= ppC.size
    val ppSize = pC.size
    val expected  = RCSizePair(ppSize, ppSize)
    sizeD shouldBe expected
  }

  val Colls = new special.collection.CollOverArrayBuilder
  val xs = Colls.fromItems(10, 20, 30)
  lazy val xsSym: Ref[Coll[Int]] = liftConst(xs)
  lazy val xsCosts = liftConst(Colls.replicate(3, 0))
  lazy val IntSize: RSize[Int] = costedBuilder.mkSizePrim(4L, element[Int])
  lazy val xsSizes = colBuilder.replicate(3, IntSize)
  lazy val xsC = costedBuilder.mkCostedColl(xsSym, xsCosts, xsSizes, 0)

  test("dataSize of CostedColl") {
    val sizeD = xsC.size
    val expected = RCSizeColl(xsSizes)
    sizeD shouldBe expected
  }

  val opt: Option[Int] = Some(10)
  lazy val optSym = liftConst(opt)
  lazy val optSize = RWSpecialPredef.some(IntSize)
  lazy val optC = costedBuilder.mkCostedOption(optSym, RWSpecialPredef.some(0), optSize, 0)

  test("dataSize of CostedOption") {
    val sizeD = optC.size
    val expected = RCSizeOption(optSize)
    sizeD shouldBe expected
  }

}