scala.collection.immutable.Map Scala Examples

The following examples show how to use scala.collection.immutable.Map. They are drawn from open-source projects; the source file, project, and license are noted above each example.
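Before the project examples, here is a minimal self-contained sketch of the immutable Map operations that recur throughout them: merging with ++, lookup with getOrElse, and entry-wise transformation with map (all names below are illustrative).

import scala.collection.immutable.Map

object ImmutableMapBasics extends App {
  val defaults  = Map("host" -> "localhost", "port" -> "8080")
  val overrides = Map("port" -> "9090")

  // ++ merges two maps; on key conflicts the right-hand map wins
  val merged = defaults ++ overrides // Map(host -> localhost, port -> 9090)

  // getOrElse looks up a key with a fallback value
  val timeout = merged.getOrElse("timeout", "30") // "30"

  // map transforms entries and returns a new map; the original is unchanged
  val upperKeys = merged.map { case (k, v) => (k.toUpperCase, v) }

  println(s"$merged $timeout $upperKeys")
}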
Example 1
Source File: RestApiClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.restapi.conf

import scala.collection.immutable.Map
import scala.language.implicitConversions

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.{GimelConstants, GimelProperties}
import com.paypal.gimel.logger.Logger


// NOTE: the surrounding class declaration and logger were elided in this
// excerpt; a minimal wrapper is restored here, following the other gimel
// *ClientConfiguration classes in this collection.
class RestApiClientConfiguration(val props: Map[String, Any]) {

  private val logger = Logger()

  def resolveUrl(props: Map[String, String]): String = {

    def MethodName: String = new Exception().getStackTrace().apply(1).getMethodName()
    logger.info(" @Begin --> " + MethodName)

    val criteriaKey: String = props(RestApiConfigs.urlPattern)
    val urlFromMap: String = props(criteriaKey)
    //    val propsToReplace: Map[String, String] = props.map { x =>
    //      val resolvedKey = if (x._1.startsWith(RestApiConstants.restApiPatternString)) x._1.stripPrefix("gimel.restapi.") else x._1
    //      (resolvedKey, x._2)
    //    }
    logger.info(s"Props are -->")
    props.foreach(println)
    val resolvedUrl: String = props.foldLeft(urlFromMap) { (url, eachProp) =>
      url.replaceAllLiterally(s"{${eachProp._1}}", eachProp._2)
    }
    logger.info(s"resolved URL is --> ${resolvedUrl}")
    resolvedUrl
  }

} 
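The foldLeft above substitutes every {key} placeholder in the URL template with the corresponding props value. A standalone sketch of the same pattern (keys and values here are illustrative, not gimel's actual property names):

val props = Map(
  "url.pattern" -> "users.url",
  "users.url"   -> "https://{host}/users/{id}",
  "host"        -> "api.example.com",
  "id"          -> "42"
)
val template = props(props("url.pattern"))
val resolved = props.foldLeft(template) { (url, kv) =>
  url.replaceAllLiterally(s"{${kv._1}}", kv._2)
}
// resolved == "https://api.example.com/users/42"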
Example 2
Source File: ConcordanceParser.scala    From CSYE7200   with MIT License
package edu.neu.coe.csye7200.concordance

import scala.util.parsing.combinator._
import scala.util.parsing.input.Positional
import scala.io.Source
import scala.collection.immutable.Map


class ConcordanceParser extends RegexParsers {
  private val rWord = """[\w’]+[,;\.\-\?\!\—]?""".r
  def word: Parser[PositionalString] = positioned(regex(rWord) ^^ {w => PositionalString(w)})
  def sentence: Parser[Seq[PositionalString]] = rep(word)
}

case class PositionalString(s: String) extends Positional

object ConcordanceParser {
 
  def main(args: Array[String]): Unit = {
    val docs = for (f <- args) yield Source.fromFile(f).mkString
    val concordance = for (i <- docs.indices) yield (args(i),parseDoc(docs(i)))
    println(concordance)
    // an alternative way of looking at the data (gives doc, page, line and char numbers with each string)
    val q = for {(d,xxxx) <- concordance; (p,xxx) <- xxxx; (l,xx) <- xxx; (_,c,x) <- xx} yield (d, p,l,c,x)
    println(q)
    // yet another way to look at the data
    val concordanceMap = concordance.toMap
    println(concordanceMap)
  }
  
  private def parseDoc(content: String) = {
    val pages = for (p <- content.split("/p")) yield p
    for (i <- pages.indices) yield (i+1,parsePage(pages(i)))
  }

  private def parsePage(content: String) = {
    val lines = for (l <- content.split("\n")) yield l
    for (i <- lines.indices) yield (i+1,parseLine(lines(i)))
  }

  def parseLine(line: String): Seq[(Int,Int,String)] = {
    def tidy(s: String) = s.replaceAll("""[,;\.\-\?\!\—]""", "")
    val p = new ConcordanceParser
    val r = p.parseAll(p.sentence,line) match {
      case p.Success(ws,_) => ws
      case p.Failure(e,_) => println(e); List()
      case _ => println("PositionalParser: logic error"); List()
    }
    r map {case p @ PositionalString(s) => (p.pos.line,p.pos.column,tidy(s).toLowerCase)}
  }
} 
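A quick usage sketch of parseLine (assuming the classes above are on the classpath): each word comes back with its line and column, punctuation stripped and lower-cased.

object ConcordanceParserDemo extends App {
  val words = ConcordanceParser.parseLine("Hello, World!")
  println(words) // List((1,1,hello), (1,8,world))
}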
Example 3
Source File: AssembleSeqOptimizerImpl.scala    From c4proto   with Apache License 2.0
package ee.cone.c4assemble

import ee.cone.c4assemble.Types._
import ee.cone.c4di.{c4, c4multi}

import scala.annotation.tailrec
import scala.collection.immutable.{Map, Seq}
import scala.concurrent.{ExecutionContext, Future}

@c4multi("AssembleApp") final class LoopExpression[MapKey, Value](
  outputWorldKeys: Seq[AssembledKey],
  loopOutputIndex: Int,
  wasOutputWorldKey: AssembledKey,
  main: WorldPartExpression, // with DataDependencyTo[Index[MapKey, Value]],
  continue: List[WorldPartExpression],
)(
  updater: IndexUpdater,
  composes: IndexUtil,
  //val outputWorldKey: AssembledKey[Index[MapKey, Value]] = main.outputWorldKey,
  continueF: WorldTransition=>WorldTransition = Function.chain(continue.map(h=>h.transform(_)))
) extends WorldPartExpression {
  private def inner(
    left: Int,
    transition: WorldTransition,
    wasSumDiffs: Option[Seq[Index]], // does not include transition's diffs
  ): Future[(IndexUpdates,IndexUpdates)] = {
    implicit val executionContext: ExecutionContext = transition.executionContext.value
    for {
      diffParts <- Future.sequence(outputWorldKeys.map(_.of(transition.diff)))
      sumDiffs = wasSumDiffs.fold(diffParts)(composes.zipMergeIndex(diffParts))
      res <- if(composes.isEmpty(diffParts(loopOutputIndex))){
        for {
          results <- Future.sequence(outputWorldKeys.map(_.of(transition.result)))
        } yield (
          new IndexUpdates(sumDiffs, results, Nil),
          new IndexUpdates(Seq(emptyIndex),Seq(results(loopOutputIndex)),Nil)
        )
      } else {
        assert(left > 0, s"unstable local assemble $diffParts")
        inner(left - 1, main.transform(continueF(transition)), Option(sumDiffs))
      }
    } yield res
  }
  def transform(transition: WorldTransition): WorldTransition = {
    val transitionA = main.transform(transition)
    if(transition eq transitionA) transition
    else finishTransform(transition, inner(1000, transitionA, None))
  }
  def finishTransform(transition: WorldTransition, next: Future[(IndexUpdates,IndexUpdates)]): WorldTransition = {
    implicit val executionContext: ExecutionContext = transition.executionContext.value
    Function.chain(Seq(
      updater.setPart(outputWorldKeys,next.map(_._1),logTask = true),
      updater.setPart(Seq(wasOutputWorldKey),next.map(_._2),logTask = false)
    ))(transition)
  }
}

@c4("AssembleApp") final class ShortAssembleSeqOptimizer(
  backStageFactory: BackStageFactory,
  loopExpressionFactory: LoopExpressionFactory
) extends AssembleSeqOptimizer {
  private def getSingleKeys[K]: Seq[K] => Set[K] = _.groupBy(i=>i).collect{ case (k,Seq(_)) => k }.toSet
  def optimize: List[Expr]=>List[WorldPartExpression] = expressionsByPriority => {
    val singleOutputKeys: Set[AssembledKey] = getSingleKeys(expressionsByPriority.flatMap(_.outputWorldKeys))
    val singleInputKeys = getSingleKeys(expressionsByPriority.flatMap(_.inputWorldKeys))
    expressionsByPriority.map{ e =>
      Single.option(e.outputWorldKeys.map{ case k:JoinKey => k }.zipWithIndex.flatMap{ case (key,i) =>
        val wKey = key.withWas(was=true)
        if(
          singleOutputKeys(key) && singleInputKeys(wKey) &&
            e.inputWorldKeys.contains(wKey)
        ) loopExpressionFactory.create[Any,Any](
          e.outputWorldKeys, i, wKey, e, backStageFactory.create(List(e))
        ) :: Nil
        else Nil
      }).getOrElse(e)
    }
  }
}

class NoAssembleSeqOptimizer() extends AssembleSeqOptimizer {
  def optimize: List[Expr]=>List[WorldPartExpression] = l=>l
} 
Example 4
Source File: ByPriorityImpl.scala    From c4proto   with Apache License 2.0
package ee.cone.c4assemble

import ee.cone.c4di.c4

import scala.collection.immutable.Map

@c4("AssembleApp") final class ByPriorityImpl extends ByPriority {
  def byPriority[K,V](uses: K=>(List[K],List[V]=>V)): List[K] => List[V] =
    new ByPriorityBuilder[K,V](uses).apply
}

case class PriorityState[K,V](map: Map[K,V], values: List[V], inProcess: Set[K], inProcessList: List[K])

class ByPriorityBuilder[K,V](uses: K=>(List[K],List[V]=>V)) {
  private def add(state: PriorityState[K,V], key: K): PriorityState[K,V] =
    if(state.map.contains(key)) state else {
      if (state.inProcess(key)) throw new Exception(s"${state.inProcessList.mkString("\n")} \nhas $key")
      val (useKeys,toValue) = uses(key)
      val deeperState = state.copy(
        inProcess = state.inProcess + key,
        inProcessList = key :: state.inProcessList
      )
      val filled: PriorityState[K, V] = useKeys.foldLeft(deeperState)(add)
      val value = toValue(useKeys.map(filled.map))
      state.copy(map = filled.map + (key->value), values = value :: filled.values)
    }
  def apply(items: List[K]): List[V] =
    items.foldLeft(PriorityState[K,V](Map.empty[K,V],Nil,Set.empty[K],Nil))(add).values
} 
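A usage sketch of ByPriorityBuilder: given dependency edges, each value is computed from the values of its dependencies, and a cycle would raise the exception shown above (the keys and toValue function here are illustrative).

object ByPriorityDemo extends App {
  // each key lists the keys it depends on
  val deps = Map(
    "app" -> List("db", "log"),
    "db"  -> List("log"),
    "log" -> List.empty[String]
  )
  // a key's value is built from its dependencies' values
  def uses(k: String): (List[String], List[String] => String) =
    (deps(k), vs => s"$k(${vs.mkString(",")})")

  val order = new ByPriorityBuilder[String, String](uses).apply(List("app"))
  // dependents come before their dependencies in the result:
  println(order) // List(app(db(log()),log()), db(log()), log())
}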
Example 5
Source File: RichRawImpl.scala    From c4proto   with Apache License 2.0
package ee.cone.c4actor

import java.util.concurrent.ExecutorService

import com.typesafe.scalalogging.LazyLogging
import ee.cone.c4assemble._
import ee.cone.c4assemble.Types._
import ee.cone.c4proto.ToByteString

import scala.collection.immutable.Map
import scala.concurrent.ExecutionContext
import java.lang.Math.toIntExact

import ee.cone.c4actor.QProtocol._
import ee.cone.c4actor.Types._
import ee.cone.c4di.c4

@c4("RichDataCompApp") final class GetOffsetImpl(
  actorName: ActorName,
  getS_Offset: GetByPK[S_Offset],
) extends GetOffset {
  def of: SharedContext with AssembledContext => NextOffset =
    ctx => getS_Offset.ofA(ctx).get(actorName.value).fold(empty)(_.txId)
  def empty: NextOffset = "0" * OffsetHexSize()
}

object EmptyInjected extends Injected

@c4("RichDataCompApp") final class RichRawWorldReducerImpl(
  injected: List[Injected],
  toUpdate: ToUpdate,
  actorName: ActorName,
  execution: Execution,
  getOffset: GetOffsetImpl,
  readModelAdd: ReadModelAdd,
  getAssembleOptions: GetAssembleOptions,
) extends RichRawWorldReducer with LazyLogging {
  def reduce(contextOpt: Option[SharedContext with AssembledContext], addEvents: List[RawEvent]): RichContext = {
    val events = if(contextOpt.nonEmpty) addEvents else {
      val offset = addEvents.lastOption.fold(getOffset.empty)(_.srcId)
      val firstborn = LEvent.update(S_Firstborn(actorName.value,offset)).toList.map(toUpdate.toUpdate)
      val (bytes, headers) = toUpdate.toBytes(firstborn)
      SimpleRawEvent(offset, ToByteString(bytes), headers) :: addEvents
    }
    if(events.isEmpty) contextOpt.get match {
      case context: RichRawWorldImpl => context
      case context => create(context.injected, context.assembled, context.executionContext)
    } else {
      val context = contextOpt.getOrElse(
        create(Single.option(injected).getOrElse(EmptyInjected), emptyReadModel, EmptyOuterExecutionContext)
      )
      val nAssembled = readModelAdd.add(events, context)
      create(context.injected, nAssembled, context.executionContext)
    }
  }

  def create(injected: Injected, assembled: ReadModel, executionContext: OuterExecutionContext): RichRawWorldImpl = {
    val preWorld = new RichRawWorldImpl(injected, assembled, executionContext, "")
    val threadCount = getAssembleOptions.get(assembled).threadCount
    val offset = getOffset.of(preWorld)
    new RichRawWorldImpl(injected, assembled, needExecutionContext(threadCount)(executionContext), offset)
  }
  def newExecutionContext(confThreadCount: Long): OuterExecutionContext = {
    val fixedThreadCount = if(confThreadCount>0) toIntExact(confThreadCount) else Runtime.getRuntime.availableProcessors
    val pool = execution.newExecutorService("ass-",Option(fixedThreadCount))
    logger.info(s"ForkJoinPool create $fixedThreadCount")
    new OuterExecutionContextImpl(confThreadCount,fixedThreadCount,ExecutionContext.fromExecutor(pool),pool)
  }
  def needExecutionContext(confThreadCount: Long): OuterExecutionContext=>OuterExecutionContext = {
    case ec: OuterExecutionContextImpl if ec.confThreadCount == confThreadCount =>
      ec
    case ec: OuterExecutionContextImpl =>
      ec.service.shutdown()
      logger.info("ForkJoinPool shutdown")
      newExecutionContext(confThreadCount)
    case _ =>
      newExecutionContext(confThreadCount)
  }
}

class OuterExecutionContextImpl(
  val confThreadCount: Long,
  val threadCount: Long,
  val value: ExecutionContext,
  val service: ExecutorService
) extends OuterExecutionContext
object EmptyOuterExecutionContext extends OuterExecutionContext {
  def value: ExecutionContext = throw new Exception("no ExecutionContext")
  def threadCount: Long =  throw new Exception("no ExecutionContext")
}

class RichRawWorldImpl(
  val injected: Injected,
  val assembled: ReadModel,
  val executionContext: OuterExecutionContext,
  val offset: NextOffset
) extends RichContext
 
Example 6
Source File: ProtocolApi.scala    From c4proto   with Apache License 2.0
package ee.cone.c4actor

import ee.cone.c4di.TypeKey
import ee.cone.c4proto._

import scala.collection.immutable.Map

abstract class GeneralDefaultArgument {
  def value: Any
}
abstract class DefaultArgument[Value](val value: Value) extends GeneralDefaultArgument
abstract class ArgAdapterFactory(val key: TypeKey, val wrap: (()=>ProtoAdapter[Any])=>ArgAdapter[_])
abstract class LazyArgAdapterFactory(val key: TypeKey, val wrap: (()=>ProtoAdapter[Any])=>ArgAdapter[_])

object ArgTypes {
  type LazyOption[T] = Option[T]
  type LazyList[T] = List[T]
}

trait QAdapterRegistry {
  def byName: Map[String, ProtoAdapter[Product] with HasId]
  def byId: Map[Long, ProtoAdapter[Product] with HasId]
} 
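A hedged sketch of how a registry like this is typically consumed: resolving an adapter by class name via the byName map. The helper below is illustrative, not part of c4proto:

def adapterFor(registry: QAdapterRegistry, className: String): ProtoAdapter[Product] with HasId =
  registry.byName.getOrElse(className, throw new Exception(s"No adapter registered for $className"))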
Example 7
Source File: CommonIdInjectApps.scala    From c4proto   with Apache License 2.0
package ee.cone.c4actor.dep

import ee.cone.c4actor.dep.ContextTypes.{ContextId, MockRoleOpt, RoleId, UserId}
import ee.cone.c4actor.dep.request.ContextIdRequestProtocol.{N_ContextIdRequest, N_MockRoleRequest, N_RoleIdRequest, N_UserIdRequest}

import scala.collection.immutable.Map

trait CommonIdInjectApps
  extends ContextIdInjectApp
    with UserIdInjectApp
    with RoleIdInjectApp
with MockRoleIdInjectApp

trait ContextIdInjectApp extends DepAskFactoryApp {
  private lazy val sessionIdAsk: DepAsk[N_ContextIdRequest, ContextId] = depAskFactory.forClasses(classOf[N_ContextIdRequest], classOf[ContextId])

  def injectContext[ReasonIn <: Product](reason: DepAsk[ReasonIn, _], handler: ReasonIn => ContextId): DepHandler =
    sessionIdAsk.byParent(reason, (rq: ReasonIn) => Map(N_ContextIdRequest() -> handler(rq)))
}

trait UserIdInjectApp extends DepAskFactoryApp {
  private lazy val userIdAsk: DepAsk[N_UserIdRequest, ContextId] = depAskFactory.forClasses(classOf[N_UserIdRequest], classOf[UserId])

  def injectUser[ReasonIn <: Product](reason: DepAsk[ReasonIn, _], handler: ReasonIn => UserId): DepHandler =
    userIdAsk.byParent(reason, (rq: ReasonIn) => Map(N_UserIdRequest() -> handler(rq)))
}

trait RoleIdInjectApp extends DepAskFactoryApp {
  private lazy val roleIdAsk: DepAsk[N_RoleIdRequest, ContextId] = depAskFactory.forClasses(classOf[N_RoleIdRequest], classOf[RoleId])

  def injectRole[ReasonIn <: Product](reason: DepAsk[ReasonIn, _], handler: ReasonIn => RoleId): DepHandler =
    roleIdAsk.byParent(reason, (rq: ReasonIn) => Map(N_RoleIdRequest() -> handler(rq)))
}

trait MockRoleIdInjectApp extends DepAskFactoryApp {
  private lazy val mockRoleIdAsk: DepAsk[N_MockRoleRequest, MockRoleOpt] = depAskFactory.forClasses(classOf[N_MockRoleRequest], classOf[MockRoleOpt])

  def injectMockRole[ReasonIn <: Product](reason: DepAsk[ReasonIn, _], handler: ReasonIn => MockRoleOpt): DepHandler =
    mockRoleIdAsk.byParent(reason, (rq: ReasonIn) => Map(N_MockRoleRequest() -> handler(rq)))
} 
Example 8
Source File: HbaseClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.hbase.conf

import scala.collection.immutable.Map
import scala.language.implicitConversions

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.{GimelConstants, GimelProperties}
import com.paypal.gimel.common.utilities.GenericUtils
import com.paypal.gimel.logger.Logger


class HbaseClientConfiguration(val props: Map[String, Any]) {

  private val logger = Logger()
  logger.info(s"Begin Building --> ${this.getClass.getName}")
  //  logger.info(s"Incoming Properties --> ${props.map(x => s"${x._1} -> ${x._2}").mkString("\n")}")

  // Load Default Prop from Resource File
  val pcatProps = GimelProperties()

  // appTag is used to maintain checkpoints & various other factors that are unique to the application
  val appTag: String = props.getOrElse(GimelConstants.APP_TAG, "").toString

  // This is the DataSet Properties
  val datasetProps: DataSetProperties = props(GimelConstants.DATASET_PROPS).asInstanceOf[DataSetProperties]
  val tableProps: Map[String, String] = datasetProps.props

  val clusterName = com.paypal.gimel.common.utilities.DataSetUtils.getYarnClusterName()
  val hbaseNameSpaceAndTable = GenericUtils.getValueFailIfEmpty(tableProps, HbaseConfigs.hbaseTableKey,
    "HBase table name not found. Please set the property " + HbaseConfigs.hbaseTableKey)
  val hbaseTableColumnMapping = tableProps.getOrElse(HbaseConfigs.hbaseColumnMappingKey, "")
  val maxSampleRecordsForSchema = GenericUtils.getValue(tableProps, HbaseConfigs.hbaseMaxRecordsForSchema, HbaseConstants.MAX_SAMPLE_RECORDS_FOR_SCHEMA).toInt
  val maxColumnsForSchema = GenericUtils.getValue(tableProps, HbaseConfigs.hbaseMaxColumnsForSchema, HbaseConstants.MAX_COLUMNS_FOR_SCHEMA).toInt
  // If this property contains both namespace and table name separated by a colon ":", take the table name by splitting the string
  val hbaseTableNamespaceSplit = hbaseNameSpaceAndTable.split(":")
  val hbaseTableName = if (hbaseTableNamespaceSplit.length > 1) {
    hbaseTableNamespaceSplit(1)
  } else {
    hbaseNameSpaceAndTable
  }
  val hbaseNameSpace = tableProps.getOrElse(HbaseConfigs.hbaseNamespaceKey, HbaseConstants.DEFAULT_NAMESPACE)
  // If the ColumnFamily name needs to be appended to the column name in the resultant DataFrame
  val hbaseColumnNamewithColumnFamilyAppended = tableProps.getOrElse(HbaseConfigs.hbaseColumnNamewithColumnFamilyAppended, "false").toString.toBoolean
  // HDFS path for hbase-site.xml
  val hbaseSiteXMLHDFSPath = tableProps.getOrElse(HbaseConfigs.hbaseSiteXMLHDFSPathKey, HbaseConstants.NONE_STRING)
  val schema: Array[String] = if (datasetProps.fields != null && datasetProps.fields.nonEmpty) {
    datasetProps.fields.map(_.fieldName)
  } else {
    Array.empty[String]
  }

  val getOption = tableProps.getOrElse(HbaseConfigs.hbaseFilter, "")

  // Getting Row Key from user otherwise from schema in UDC or hive table. If it is not present in schema also, set defaultValue
  val hbaseRowKeys = tableProps.getOrElse(HbaseConfigs.hbaseRowKey, HbaseConstants.DEFAULT_ROW_KEY_COLUMN).split(",")

  logger.info(s"Fields Initiated --> ${this.getClass.getFields.map(f => s"${f.getName} --> ${f.get().toString}").mkString("\n")}")
  logger.info(s"Completed Building --> ${this.getClass.getName}")

} 
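The namespace handling above, in isolation: when the configured value carries both namespace and table separated by a colon, only the part after the colon is the table name.

val nsAndTable = "default:users" // illustrative value
val parts = nsAndTable.split(":")
val tableName = if (parts.length > 1) parts(1) else nsAndTable
// tableName == "users"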
Example 9
Source File: CassandraClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.cassandra.conf

import scala.collection.immutable.Map
import scala.language.implicitConversions

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.GimelConstants
import com.paypal.gimel.common.conf.GimelProperties
import com.paypal.gimel.logger.Logger


class CassandraClientConfiguration(val props: Map[String, Any]) {
  private val logger = Logger()
  logger.info(s"Begin Building --> ${this.getClass.getName}")
  logger.debug(s"Incoming Properties --> ${props.map(x => s"${x._1} -> ${x._2}").mkString("\n")}")

  // Load Default Prop from Resource File
  val pcatProps = GimelProperties()

  // appTag is used to maintain checkpoints & other factors that are unique to the application.
  val appTag: String = props.getOrElse(GimelConstants.APP_TAG, "").toString

  // This is the DataSet Properties
  val datasetProps: DataSetProperties = props(GimelConstants.DATASET_PROPS).asInstanceOf[DataSetProperties]
  val tableProps: Map[String, String] = datasetProps.props

  logger.info(s"Hive Table Props --> ${tableProps.map(x => s"${x._1} --> ${x._2}").mkString("\n")}")

  private def errorIfMissing(key: String): Unit = {
    if (tableProps.get(key).isEmpty) {
      throw new Exception(s"Missing $key!")
    }
  }

  errorIfMissing(CassandraConfigs.gimelCassandraConnectionHosts)
  errorIfMissing(CassandraConfigs.gimelCassandraClusterName)
  errorIfMissing(CassandraConfigs.gimelCassandraKeySpaceName)
  errorIfMissing(CassandraConfigs.gimelCassandraTableName)

  private def getOrDefault(key: String, defaultVal: String = null): String = {
    if (defaultVal == null) {
      props.getOrElse(key, tableProps(key)).toString
    } else {
      props.getOrElse(key, tableProps.getOrElse(key, defaultVal)).toString
    }
  }

  val cassandraHosts = getOrDefault(CassandraConfigs.gimelCassandraConnectionHosts)
  val cassandraCluster = getOrDefault(CassandraConfigs.gimelCassandraClusterName)
  val cassandraKeySpace = getOrDefault(CassandraConfigs.gimelCassandraKeySpaceName)
  val cassandraTable = getOrDefault(CassandraConfigs.gimelCassandraTableName)
  val cassandraPushDownIsEnabled =
    getOrDefault(CassandraConfigs.gimelCassandraPushDownIsEnabled, "true").toBoolean
  val cassandraTruncateTableIsEnabled =
    getOrDefault(CassandraConfigs.gimelCassandraTableConfirmTruncate, "false").toBoolean
  val cassandraSparkInputSizeMb = getOrDefault(CassandraConfigs.gimelCassandraInputSize, "256").toInt
  val cassandraSparkTtl = getOrDefault(CassandraConfigs.gimelCassandraSparkTtl, "3600")

  val cassandraDfOptions: Map[String, String] = Map(
    "cluster" -> cassandraCluster
    , "keyspace" -> cassandraKeySpace
    , "table" -> cassandraTable
    , "pushdown" -> cassandraPushDownIsEnabled.toString
    , CassandraConfigs.gimelCassandraInputSize -> cassandraSparkInputSizeMb.toString
  )

} 
Example 10
Source File: HdfsClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.hdfs.conf

import scala.collection.immutable.Map
import scala.language.implicitConversions

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.{GimelConstants, GimelProperties}
import com.paypal.gimel.logger.Logger


class HdfsClientConfiguration(val props: Map[String, Any]) {


  val logger = Logger()
  logger.info(s"Begin Building --> ${this.getClass.getName}")
  // Load Default Prop from Resource File
  val pcatProps = GimelProperties()

  // appTag is used to maintain checkpoints & various other factors that are unique to the application
  val appTag: String = props.getOrElse(GimelConstants.APP_TAG, "").toString

  // This is the DataSet Properties
  val datasetProps: DataSetProperties = props(GimelConstants.DATASET_PROPS).asInstanceOf[DataSetProperties]
  val tableProps: Map[String, String] = datasetProps.props
  val inferSchema = props.getOrElse(HdfsConfigs.inferSchemaKey, "true").toString
  val header = props.getOrElse(HdfsConfigs.fileHeaderKey, "true").toString
  val rowDelimiter = tableProps.getOrElse(HdfsConfigs.rowDelimiter, HdfsConstants.newLineDelimiterValue).toString

  // Column delimiter for CSV
  // For Backward Compatibility
  val hiveCsvFieldDelimiter = tableProps.getOrElse(HdfsConfigs.hiveFieldDelimiterKey, HdfsConstants.commaDelimiterValue)
  val csvDelimiterV1 = tableProps.getOrElse(HdfsConfigs.columnDelimiterVersion1, hiveCsvFieldDelimiter).toString
  val csvDelimiter = tableProps.getOrElse(HdfsConfigs.columnDelimiter, csvDelimiterV1).toString
  // Column delimiter for other formats
  // For Backward Compatibility
  val hiveFieldDelimiter = tableProps.getOrElse(HdfsConfigs.hiveFieldDelimiterKey, HdfsConstants.controlAOctalDelimiterValue)
  val colDelimiterV1 = tableProps.getOrElse(HdfsConfigs.columnDelimiterVersion1, hiveFieldDelimiter).toString
  val colDelimiter = tableProps.getOrElse(HdfsConfigs.columnDelimiter, colDelimiterV1).toString

  val clusterNameNode = tableProps.getOrElse(GimelConstants.hdfsNameNodeKey, "")
  val clusterDataLocation = tableProps.getOrElse(HdfsConfigs.hdfsDataLocationKey, "")
  val clusterdataFormat = tableProps.getOrElse(HdfsConfigs.hdfsDataFormatKey, "text")
  val clusterThresholdGB = props.getOrElse(HdfsConfigs.hdfsCrossClusterThresholdKey, HdfsConstants.thresholdGBData).toString
  val readOptions = props.getOrElse(HdfsConfigs.readOptions, "").toString

  logger.info(s"Hdfs Props --> ${tableProps.map(x => s"${x._1} --> ${x._2}").mkString("\n")}")
  logger.info(s"Fields Initiated --> ${this.getClass.getFields.map(f => s"${f.getName} --> ${f.get().toString}").mkString("\n")}")
  logger.info(s"Completed Building --> ${this.getClass.getName}")
} 
Example 11
Source File: AerospikeClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.aerospike.conf

import scala.collection.immutable.Map
import scala.language.implicitConversions

import com.paypal.gimel.aerospike.utilities.AerospikeUtilities.AerospikeDataSetException
import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.{GimelConstants, GimelProperties}
import com.paypal.gimel.logger.Logger


// NOTE: the surrounding class and its field initializers (logger,
// aerospikeSeedHosts, aerospikeNamespace, aerospikeSet) were elided in this
// excerpt; placeholder stubs are restored so the method reads in context.
class AerospikeClientConfiguration(val props: Map[String, Any]) {

  private val logger = Logger()
  // placeholder values -- the original reads these from the dataset properties
  val aerospikeSeedHosts: String = ""
  val aerospikeNamespace: String = ""
  val aerospikeSet: String = ""

  def validateAerospikeProperties(): Unit = {
    if (aerospikeSeedHosts.isEmpty) {
      logger.error("Aerospike Seed Hosts not found.")
      throw AerospikeDataSetException("Aerospike Seed Hosts not found.")
    }
    if (aerospikeNamespace.isEmpty) {
      logger.error("Aerospike namespace not found.")
      throw AerospikeDataSetException("Aerospike namespace not found.")
    }
    if (aerospikeSet.isEmpty) {
      logger.error("Aerospike Set Name not found.")
      throw AerospikeDataSetException("Aerospike Set Name not found.")
    }
  }
} 
Example 12
Source File: JDBCUtilityFunctions.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.jdbc.utilities

import scala.collection.immutable.Map

import org.apache.spark.sql.types.StructField

import com.paypal.gimel.common.conf.GimelConstants

object JDBCUtilityFunctions {
  
  def prepareCreateStatement(sql: String, dbtable: String, dataSetProps: Map[String, Any]): String = {
    // Here we remove the SELECT portion and have only the CREATE portion of the DDL supplied so that we can use that to create the table
    val sqlParts = sql.split(" ")
    val lenPartKeys = sqlParts.length
    val index = sqlParts.indexWhere(_.toUpperCase() == "SELECT")
    val createOnly: String = sqlParts.slice(0, index - 1).mkString(" ")

    // Here we remove the PCATALOG prefix => we replace pcatalog.storagetype.storagesystem.DB.Table with DB.Table
    val createParts = createOnly.split(" ")
    val pcatSQL = createParts.map(element => {
      if (element.toLowerCase().contains(GimelConstants.PCATALOG_STRING)) {
        // we replace pcatalog.storagetype.storagesystem.DB.Table with DB.Table
        element.split('.').tail.mkString(".").split('.').tail.mkString(".").split('.').tail.mkString(".")
      }
      else {
        element
      }
    }
    ).mkString(" ")

    val sparkSchema = dataSetProps(GimelConstants.TABLE_FILEDS).asInstanceOf[Array[StructField]]
    // From the dataframe schema, translate them into Teradata data types
    val gimelSchema: Array[com.paypal.gimel.common.catalog.Field] = sparkSchema.map(x => {
      com.paypal.gimel.common.catalog.Field(x.name, SparkToJavaConverter.getTeradataDataType(x.dataType), x.nullable)
    })
    val colList: Array[String] = gimelSchema.map(x => (x.fieldName + " " + (x.fieldType) + ","))
    val conCatColumns = colList.mkString("").dropRight(1)
    val colQualifier = s"""(${conCatColumns})"""

    // Here we inject the columns with data types back into the SQL statement
    val newSqlParts = pcatSQL.split(" ")
    val PCATindex = newSqlParts.indexWhere(_.toUpperCase().contains("TABLE"))
    val catPrefix = newSqlParts.slice(0, PCATindex + 2).mkString(" ")
    val catSuffix = newSqlParts.slice(PCATindex + 2, newSqlParts.length).mkString(" ")
    val fullStatement = s"""${catPrefix} ${colQualifier} ${catSuffix}"""
    fullStatement.trim()
  }


} 
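The chained .split('.').tail.mkString(".") above, applied three times, simply drops the first three dotted segments of the catalog name; an equivalent one-liner:

val element  = "pcatalog.storagetype.storagesystem.DB.Table"
val stripped = element.split('.').drop(3).mkString(".") // "DB.Table"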
Example 13
Source File: KafkaBatchProducer.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.kafka2.writer

import java.util.Properties

import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.language.implicitConversions
import scala.reflect.runtime.universe._

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.DataFrame

import com.paypal.gimel.kafka2.conf.{KafkaClientConfiguration, KafkaConstants}
import com.paypal.gimel.kafka2.utilities.{KafkaOptionsLoaderUtils, KafkaUtilitiesException}


// NOTE: the object wrapper and logger were elided in this excerpt; restored
// here following KafkaStreamProducer in the next example.
object KafkaBatchProducer {
  val logger = com.paypal.gimel.logger.Logger()

  def produceToKafka(conf: KafkaClientConfiguration, dataFrame: DataFrame): Unit = {
    def MethodName: String = new Exception().getStackTrace().apply(1).getMethodName()
    logger.info(" @Begin --> " + MethodName)

    val kafkaProps: Properties = conf.kafkaProducerProps
    logger.info(s"Kafka Props for Producer -> ${kafkaProps.asScala.mkString("\n")}")
    val kafkaTopic = conf.kafkaTopics
    val kafkaTopicsOptionsMap : Map[String, Map[String, String]] =
      KafkaOptionsLoaderUtils.getAllKafkaTopicsOptions(conf)
    logger.info("Kafka options loaded -> " + kafkaTopicsOptionsMap)
    val eachKafkaTopicToOptionsMap = KafkaOptionsLoaderUtils.getEachKafkaTopicToOptionsMap(kafkaTopicsOptionsMap)
    logger.info("Begin Publishing to Kafka....")
    try {
      val kafkaTopicOptions = eachKafkaTopicToOptionsMap.get(kafkaTopic)
      kafkaTopicOptions match {
        case None =>
          throw new IllegalStateException(s"""Could not load options for the kafka topic -> $kafkaTopic""")
        case Some(kafkaOptions) =>
          dataFrame
            .write
            .format(KafkaConstants.KAFKA_FORMAT)
            .option(KafkaConstants.KAFKA_TOPIC, kafkaTopic)
            .options(kafkaOptions)
            .save()
      }
    }
    catch {
      case ex: Throwable => {
        ex.printStackTrace()
        val msg =
          s"""
             |kafkaTopic -> ${kafkaTopic}
             |kafkaParams --> ${kafkaProps.asScala.mkString("\n")}}
          """.stripMargin
        throw new KafkaUtilitiesException(s"Failed While Pushing Data Into Kafka \n ${msg}")
      }
    }
    logger.info("Publish to Kafka - Completed !")
  }
} 
Example 14
Source File: KafkaStreamProducer.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.kafka2.writer

import java.util.Properties

import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.language.implicitConversions

import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.streaming.DataStreamWriter

import com.paypal.gimel.common.conf.GimelConstants
import com.paypal.gimel.kafka2.conf.{KafkaClientConfiguration, KafkaConstants}
import com.paypal.gimel.kafka2.utilities.{KafkaOptionsLoaderUtils, KafkaUtilitiesException}

object KafkaStreamProducer {
  val logger = com.paypal.gimel.logger.Logger()

  
  def produceStreamToKafka(conf: KafkaClientConfiguration, dataFrame: DataFrame): DataStreamWriter[Row] = {
    def MethodName: String = new Exception().getStackTrace().apply(1).getMethodName()
    logger.info(" @Begin --> " + MethodName)

    val kafkaProps: Properties = conf.kafkaProducerProps
    logger.info(s"Kafka Props for Producer -> ${kafkaProps.asScala.mkString("\n")}")
    logger.info("Begin Publishing to Kafka....")
    // Retrieve kafka options from OptionsLoader if specified
    val kafkaTopicsOptionsMap : Map[String, Map[String, String]] = KafkaOptionsLoaderUtils.getAllKafkaTopicsOptions(conf)
    logger.info("kafkaTopicsOptionsMap -> " + kafkaTopicsOptionsMap)
    try {
      val eachKafkaTopicToOptionsMap = KafkaOptionsLoaderUtils.getEachKafkaTopicToOptionsMap(kafkaTopicsOptionsMap)
      val kafkaTopicOptions = eachKafkaTopicToOptionsMap.get(conf.kafkaTopics)
      kafkaTopicOptions match {
        case None =>
          throw new IllegalStateException(s"""Could not load options for the kafka topic -> ${conf.kafkaTopics}""")
        case Some(kafkaOptions) =>
          dataFrame
            .writeStream
            .format(KafkaConstants.KAFKA_FORMAT)
            .option(KafkaConstants.KAFKA_TOPIC, conf.kafkaTopics)
            .option(GimelConstants.STREAMING_CHECKPOINT_LOCATION, conf.streamingCheckpointLocation)
            .outputMode(conf.streamingOutputMode)
            .options(kafkaOptions)
      }
    }
    catch {
      case ex: Throwable => {
        ex.printStackTrace()
        val msg =
          s"""
             |kafkaTopic -> ${conf.kafkaTopics}
             |kafkaParams --> ${kafkaProps.asScala.mkString("\n")}}
          """.stripMargin
        throw new KafkaUtilitiesException(s"Failed While Pushing Data Into Kafka \n ${msg}")
      }
    }
  }
} 
Example 15
Source File: KafkaOptionsLoaderUtils.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.kafka2.utilities

import scala.collection.immutable.Map
import scala.util.{Failure, Success, Try}

import com.paypal.gimel.common.conf.{GimelConstants, KafkaOptionsLoader}
import com.paypal.gimel.kafka2.conf.{KafkaClientConfiguration, KafkaConfigs, KafkaConstants}
import com.paypal.gimel.kafka2.utilities.KafkaUtilities.logger

object KafkaOptionsLoaderUtils {

  
  def getEachKafkaTopicToOptionsMap(kafkaOptions: Map[String, Map[String, String]]): Map[String, Map[String, String]] = {
    def MethodName: String = new Exception().getStackTrace.apply(1).getMethodName

    logger.info(" @Begin --> " + MethodName)

    val kafkaOptionsWithKafkaKeyword: Map[String, Map[String, String]] = kafkaOptions.map{case (topicList, options) => {
      (topicList, options.map(eachOption => {
        if (!eachOption._1.startsWith(KafkaConstants.KAFKA_CONST)) {
          (s"${KafkaConstants.KAFKA_CONST}.${eachOption._1}", eachOption._2)
        } else {
          (eachOption._1, eachOption._2)
        }
      }))
    }}
    kafkaOptionsWithKafkaKeyword.flatMap(x => {
      x._1.split(",").map(each => (each, x._2))
    })
  }

} 
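A standalone sketch of what getEachKafkaTopicToOptionsMap does, using literal maps (this assumes KafkaConstants.KAFKA_CONST is the string "kafka", as the test in the next example suggests): comma-separated topic keys fan out to one entry per topic, and option keys gain the kafka. prefix that Spark's Kafka source expects.

val in = Map("t1,t2" -> Map("bootstrap.servers" -> "localhost:9092"))
val out = in
  .map { case (topics, opts) =>
    topics -> opts.map { case (k, v) =>
      (if (k.startsWith("kafka")) k else s"kafka.$k") -> v
    }
  }
  .flatMap { case (topics, opts) => topics.split(",").map(_ -> opts) }
// out == Map(t1 -> Map(kafka.bootstrap.servers -> localhost:9092),
//            t2 -> Map(kafka.bootstrap.servers -> localhost:9092))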
Example 16
Source File: KafkaOptionsLoaderUtilsTest.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.kafka2.utilities

import scala.collection.immutable.Map

import org.scalatest._

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.GimelConstants
import com.paypal.gimel.kafka2.conf.{KafkaClientConfiguration, KafkaConfigs}

class KafkaOptionsLoaderUtilsTest extends FunSpec with Matchers {
  val topic = "test_gimel_consumer"

  describe("getAllKafkaTopicsOptionsFromLoader") {
    it("should get all options from kafka options loader") {
      val props: Map[String, String] = Map(KafkaConfigs.whiteListTopicsKey -> topic,
        KafkaConfigs.kafkaOptionsLoaderKey -> "com.paypal.gimel.kafka2.utilities.MockKafkaoptionsLoader")
      val appTag = "test_app_tag"
      val dataSetProperties = DataSetProperties("KAFKA", null, null, props)
      val datasetProps: Map[String, Any] = Map("dataSetProperties" -> dataSetProperties,
        GimelConstants.APP_TAG -> appTag)
      val conf = new KafkaClientConfiguration(datasetProps)
      assert(KafkaOptionsLoaderUtils.getAllKafkaTopicsOptionsFromLoader(conf)
        .sameElements(Map("test_gimel_consumer" -> Map("bootstrap.servers" -> "localhost:9092"))))
    }
  }

  describe("getAllKafkaTopicsOptions") {
    it("should get all options from kafka options loader if specified") {
      val props: Map[String, String] = Map(KafkaConfigs.whiteListTopicsKey -> topic,
        KafkaConfigs.kafkaOptionsLoaderKey -> "com.paypal.gimel.kafka2.utilities.MockKafkaoptionsLoader")
      val appTag = "test_app_tag"
      val dataSetProperties = DataSetProperties("KAFKA", null, null, props)
      val datasetProps: Map[String, Any] = Map("dataSetProperties" -> dataSetProperties,
        GimelConstants.APP_TAG -> appTag)
      val conf = new KafkaClientConfiguration(datasetProps)
      assert(KafkaOptionsLoaderUtils.getAllKafkaTopicsOptions(conf)
        .sameElements(Map("test_gimel_consumer" -> Map("bootstrap.servers" -> "localhost:9092"))))
    }

    it("should get default options if kafka options loader is not specified") {
      val props: Map[String, String] = Map(KafkaConfigs.whiteListTopicsKey -> topic,
        KafkaConfigs.kafkaServerKey -> "localhost:9093")
      val appTag = "test_app_tag"
      val dataSetProperties = DataSetProperties("KAFKA", null, null, props)
      val datasetProps: Map[String, Any] = Map("dataSetProperties" -> dataSetProperties,
        GimelConstants.APP_TAG -> appTag)
      val conf = new KafkaClientConfiguration(datasetProps)
      assert(KafkaOptionsLoaderUtils.getAllKafkaTopicsOptions(conf)
        .sameElements(Map("test_gimel_consumer" -> Map("bootstrap.servers" -> "localhost:9093"))))
    }
  }

  describe("getAllKafkaTopicsDefaultOptions") {
    it("should get all options from kafka options loader") {
      val props: Map[String, String] = Map(KafkaConfigs.whiteListTopicsKey -> topic,
        KafkaConfigs.kafkaServerKey -> "localhost:9093")
      val appTag = "test_app_tag"
      val dataSetProperties = DataSetProperties("KAFKA", null, null, props)
      val datasetProps: Map[String, Any] = Map("dataSetProperties" -> dataSetProperties,
        GimelConstants.APP_TAG -> appTag)
      val conf = new KafkaClientConfiguration(datasetProps)
      assert(KafkaOptionsLoaderUtils.getAllKafkaTopicsDefaultOptions(conf)
        .sameElements(Map("test_gimel_consumer" -> Map("bootstrap.servers" -> "localhost:9093"))))
    }
  }

  describe("getEachKafkaTopicToOptionsMap") {
    it("should get a map of each topic to its properties") {
      val props: Map[String, String] = Map(KafkaConfigs.whiteListTopicsKey -> s"$topic,${topic}_1,${topic}_2",
        KafkaConfigs.kafkaOptionsLoaderKey -> "com.paypal.gimel.kafka2.utilities.MockKafkaoptionsLoader")
      val appTag = "test_app_tag"
      val dataSetProperties = DataSetProperties("KAFKA", null, null, props)
      val datasetProps: Map[String, Any] = Map("dataSetProperties" -> dataSetProperties,
        GimelConstants.APP_TAG -> appTag)
      val conf = new KafkaClientConfiguration(datasetProps)
      val kafkaOptionsMap = KafkaOptionsLoaderUtils.getAllKafkaTopicsDefaultOptions(conf)
      assert(KafkaOptionsLoaderUtils.getEachKafkaTopicToOptionsMap(kafkaOptionsMap)
        == Map("test_gimel_consumer" -> Map("kafka.bootstrap.servers" -> "localhost:9092"),
          "test_gimel_consumer_1" -> Map("kafka.bootstrap.servers" -> "localhost:9092"),
          "test_gimel_consumer_2" -> Map("kafka.bootstrap.servers" -> "localhost:9092"))
      )
    }
  }
} 
Example 17
Source File: IncompleteConfiguredDecoders.scala    From circe-generic-extras   with Apache License 2.0
package io.circe.generic.extras.decoding

import io.circe.{ Decoder, HCursor }
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.util.RecordToMap
import io.circe.generic.util.PatchWithOptions
import scala.collection.immutable.Map
import shapeless.{ Default, HList, LabelledGeneric }
import shapeless.ops.function.FnFromProduct
import shapeless.ops.record.RemoveAll

private[circe] trait IncompleteConfiguredDecoders {
  implicit final def decodeIncompleteCaseClass[F, P <: HList, A, D <: HList, T <: HList, R <: HList](implicit
    ffp: FnFromProduct.Aux[P => A, F],
    gen: LabelledGeneric.Aux[A, T],
    removeAll: RemoveAll.Aux[T, P, (P, R)],
    decode: ReprDecoder[R],
    defaults: Default.AsRecord.Aux[A, D],
    defaultMapper: RecordToMap[D],
    config: Configuration
  ): ConfiguredDecoder[F] = new ConfiguredDecoder[F](config) {
    private[this] val defaultMap: Map[String, Any] = if (config.useDefaults) defaultMapper(defaults()) else Map.empty

    final def apply(c: HCursor): Decoder.Result[F] = decode.configuredDecode(c)(
      config.transformMemberNames,
      constructorNameTransformer,
      defaultMap,
      None
    ) match {
      case Right(r)    => Right(ffp(p => gen.from(removeAll.reinsert((p, r)))))
      case l @ Left(_) => l.asInstanceOf[Decoder.Result[F]]
    }

    override final def decodeAccumulating(c: HCursor): Decoder.AccumulatingResult[F] =
      decode
        .configuredDecodeAccumulating(c)(
          config.transformMemberNames,
          constructorNameTransformer,
          defaultMap,
          None
        )
        .map(r => ffp(p => gen.from(removeAll.reinsert((p, r)))))
  }

  implicit final def decodeCaseClassPatch[A, D <: HList, R <: HList, O <: HList](implicit
    gen: LabelledGeneric.Aux[A, R],
    patch: PatchWithOptions.Aux[R, O],
    decode: ReprDecoder[O],
    defaults: Default.AsRecord.Aux[A, D],
    defaultMapper: RecordToMap[D],
    config: Configuration
  ): ConfiguredDecoder[A => A] = new ConfiguredDecoder[A => A](config) {
    private[this] val defaultMap: Map[String, Any] = if (config.useDefaults) defaultMapper(defaults()) else Map.empty

    final def apply(c: HCursor): Decoder.Result[A => A] = decode.configuredDecode(c)(
      config.transformMemberNames,
      constructorNameTransformer,
      defaultMap,
      None
    ) match {
      case Right(o)    => Right(a => gen.from(patch(gen.to(a), o)))
      case l @ Left(_) => l.asInstanceOf[Decoder.Result[A => A]]
    }

    override final def decodeAccumulating(c: HCursor): Decoder.AccumulatingResult[A => A] =
      decode
        .configuredDecodeAccumulating(c)(
          config.transformMemberNames,
          constructorNameTransformer,
          defaultMap,
          None
        )
        .map(o => a => gen.from(patch(gen.to(a), o)))
  }
} 
Example 18
Source File: SFTPClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.sftp.conf

import scala.collection.immutable.Map

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.{GimelConstants, GimelProperties}
import com.paypal.gimel.logger.Logger
import com.paypal.gimel.sftp.utilities.SFTPUtilities

class SFTPClientConfiguration(val props: Map[String, Any]) {

  private val logger = Logger()
  logger.info(s"Begin Building --> ${this.getClass.getName}")
  logger.info(s"Incoming Properties --> ${props.map(x => s"${x._1} -> ${x._2}").mkString("\n")}")

  val dataSetProps: DataSetProperties = props(GimelConstants.DATASET_PROPS).asInstanceOf[DataSetProperties]
  val tableProps: scala.collection.immutable.Map[String, String] = dataSetProps.props
  val allProps: Map[String, String] = tableProps ++ props.map { x => (x._1, x._2.toString) }
  val filePath: String = props.getOrElse(SFTPConfigs.filePath, tableProps.getOrElse(SFTPConfigs.filePath, "")).toString

  // Password Strategy will be a) user b) file c) batch
  // If it file, passwordFileSource will tell whether to read from hdfs or local
  val password: String = tableProps.getOrElse(SFTPConfigs.passwordStrategy, SFTPConstants.fileStrategy) match {
    case SFTPConstants.fileStrategy => {
      tableProps.getOrElse(SFTPConfigs.passwordFileSource, props.get(SFTPConfigs.passwordFileSource).get).toString match {
        case SFTPConstants.HDFSPasswordFile => SFTPUtilities.getPasswordFromHDFS(tableProps.getOrElse(SFTPConfigs.passwordFilePath, props.get(SFTPConfigs.passwordFilePath).get).toString)
        case SFTPConstants.localPasswordFile => SFTPUtilities.getPasswordFromLocal(tableProps.getOrElse(SFTPConfigs.passwordFilePath, props.get(SFTPConfigs.passwordFilePath).get).toString)
      }
    }
    case SFTPConstants.userStrategy => tableProps.getOrElse(SFTPConfigs.password, props.get(SFTPConfigs.password).get).toString
    case SFTPConstants.batchStrategy => SFTPConstants.passwordLess
  }
  val finalProps: Map[String, String] = allProps ++ Map(
    SFTPConstants.host -> props.getOrElse(SFTPConfigs.sftpHost, tableProps.get(SFTPConfigs.sftpHost).get).toString,
    SFTPConstants.username -> props.getOrElse(SFTPConfigs.sftpUserName, tableProps.getOrElse(SFTPConfigs.sftpUserName, "")).toString,
    SFTPConstants.password -> password,
    SFTPConstants.filetype -> props.getOrElse(SFTPConfigs.fileType, tableProps.getOrElse(SFTPConfigs.fileType, "")).toString,
    SFTPConstants.delimiter -> props.getOrElse(SFTPConfigs.delimiter, tableProps.getOrElse(SFTPConfigs.delimiter, ",")).toString,
    SFTPConstants.inferschema -> props.getOrElse(SFTPConfigs.inferSchema, tableProps.getOrElse(SFTPConfigs.inferSchema, "false")).toString)

  logger.info("Final Props are " + finalProps)
} 
Example 19
Source File: S3ClientConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.s3.conf

import scala.collection.immutable.Map

import com.paypal.gimel.common.catalog.DataSetProperties
import com.paypal.gimel.common.conf.{GimelConstants, GimelProperties}
import com.paypal.gimel.logger.Logger
import com.paypal.gimel.s3.utilities.S3Utilities

class S3ClientConfiguration(val props: Map[String, Any]) {

  private val logger = Logger()

  logger.info(s"Begin Building --> ${this.getClass.getName}")
  logger.info(s"Incoming Properties --> ${props.map(x => s"${x._1} -> ${x._2}").mkString("\n")}")

  val dataSetProps: DataSetProperties = props(GimelConstants.DATASET_PROPS).asInstanceOf[DataSetProperties]
  val tableProps: scala.collection.immutable.Map[String, String] = dataSetProps.props
  val allProps: Map[String, String] = tableProps ++ props.map { x => (x._1, x._2.toString) }

  val objectPath: String = props.getOrElse(S3Configs.objectPath, tableProps.getOrElse(S3Configs.objectPath, "")).toString
  if(objectPath.isEmpty) {
    throw new Exception("Object Path cannot be empty. Set the property " + S3Configs.objectPath + " !!")
  }

  val (accessId, secretKey) = props.getOrElse(S3Configs.credentialsStrategy, tableProps.getOrElse(S3Configs.credentialsStrategy, S3Constants.fileStrategy)) match {
    case S3Constants.fileStrategy => {
      val credentialsFileSource = props.getOrElse(S3Configs.credentialsFileSource, tableProps.getOrElse(S3Configs.credentialsFileSource, "")).toString
      if(credentialsFileSource.isEmpty) {
        throw new Exception("Credentials File Source cannot be empty. Set the property " + S3Configs.credentialsFileSource + " !!")
      }
      val credentialsFilePath = props.getOrElse(S3Configs.credentialsFilePath, tableProps.getOrElse(S3Configs.credentialsFilePath, "")).toString
      if(credentialsFilePath.isEmpty) {
        throw new Exception("Credentials File Path cannot be empty. Set the property " + S3Configs.credentialsFilePath + " !!")
      }
      credentialsFileSource match {
        case S3Constants.HDFSCredentialsFile => S3Utilities.getCredentialsFromHDFS(credentialsFilePath)
        case S3Constants.localCredentialsFile => S3Utilities.getCredentialsFromLocal(credentialsFilePath)
      }
    }
    case S3Constants.userStrategy => (props.getOrElse(S3Configs.accessId, tableProps.getOrElse(S3Configs.accessId, "")).toString, props.getOrElse(S3Configs.secretKey, tableProps.getOrElse(S3Configs.secretKey, "")).toString)
    case _ => (S3Constants.credentialLess, S3Constants.credentialLess)
  }

  val s3aImpl = props.getOrElse(S3Configs.s3aClientImpl, tableProps.getOrElse(S3Configs.s3aClientImpl, S3Constants.s3aImpl)).toString
  val sslEnabled = props.getOrElse(S3Configs.sslEnabled, tableProps.getOrElse(S3Configs.sslEnabled, "false")).toString
  val objectFormat = props.getOrElse(S3Configs.objectFormat, tableProps.getOrElse(S3Configs.objectFormat, "text"))
  val pathStyleAccess = props.getOrElse(S3Configs.pathStyleAccess, tableProps.getOrElse(S3Configs.pathStyleAccess, "true")).toString
  val endPoint = props.getOrElse(S3Configs.endPoint, tableProps.getOrElse(S3Configs.endPoint, "")).toString
  val delimiter = props.getOrElse(S3Configs.delimiter, tableProps.getOrElse(S3Configs.delimiter, ",")).toString
  val inferSchema = props.getOrElse(S3Configs.inferSchema, tableProps.getOrElse(S3Configs.inferSchema, "false")).toString
  val header = props.getOrElse(S3Configs.header, tableProps.getOrElse(S3Configs.header, "false")).toString
  val saveMode = props.getOrElse(S3Configs.saveMode, tableProps.getOrElse(S3Configs.saveMode, "error")).toString

  val finalProps: Map[String, String] = allProps ++ Map(
    S3Constants.delimiter -> props.getOrElse(S3Configs.delimiter, tableProps.getOrElse(S3Configs.delimiter, ",")).toString,
    S3Constants.inferschema -> props.getOrElse(S3Configs.inferSchema, tableProps.getOrElse(S3Configs.inferSchema, "false")).toString,
    S3Constants.header -> props.getOrElse(S3Configs.header, tableProps.getOrElse(S3Configs.header, "false")).toString)
} 
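Examples 18 and 19 repeat one lookup idiom: the runtime props entry wins, then the catalog tableProps entry, then a hard default. A hypothetical helper (not part of gimel) that makes the precedence explicit:

def layeredLookup(props: Map[String, Any], tableProps: Map[String, String])
                 (key: String, default: String): String =
  props.getOrElse(key, tableProps.getOrElse(key, default)).toString

// e.g. val delimiter = layeredLookup(props, tableProps)(S3Configs.delimiter, ",")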
Example 20
Source File: ConfluentSchemaRegistry.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.common.schema

import io.confluent.kafka.schemaregistry.client.rest.RestService
import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.collection.mutable
import scala.language.implicitConversions


class ConfluentSchemaRegistry(server: String) extends RestService(server)

object SchemaRegistryLookUp {

  def getAllSubjectAndSchema(schemaURL: String): Map[String, (String, mutable.Map[Int, String])] = {

    try {
      val schemaRegistryClient = new ConfluentSchemaRegistry(schemaURL)
      val allSubjects = schemaRegistryClient.getAllSubjects.asScala
      val allSchemas = allSubjects.map {
        eachSubject =>
          val eachSubjectSchemas = getAllSchemasForSubject(eachSubject, schemaURL)
          (eachSubject, eachSubjectSchemas)
      }.toMap
      allSchemas
    }
    catch {
      case ex: Throwable =>
        ex.printStackTrace()
        throw new Exception("Unable to read schemas from Schema Registry -->" + schemaURL + "\n" + ex)
    }
  }

  def getAllSchemasForSubject(schemaSubject: String, avroSchemaURL: String): (String, mutable.Map[Int, String]) = {

    try {
      val schemaLookup: scala.collection.mutable.Map[Int, String] = scala.collection.mutable.Map()
      val schemaRegistryClient = new ConfluentSchemaRegistry(avroSchemaURL)
      val k = schemaRegistryClient.getAllVersions(schemaSubject).asScala
      val k2 = k.map { eachVersion =>
        val version = eachVersion.toString.toInt
        version -> schemaRegistryClient.getVersion(schemaSubject, version).getSchema
      }.toMap
      k2.foreach(entry => schemaLookup.put(entry._1, entry._2))
      val latestSchema = schemaRegistryClient.getLatestVersion(schemaSubject).getSchema
      (latestSchema, schemaLookup)
    }
    catch {
      case ex: Throwable =>
        ex.printStackTrace()
        throw new Exception("Unable to read schema subject '" + schemaSubject + "' from Schema Registry -->" + avroSchemaURL + "\n" + ex)
    }
  }

} 
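Usage is a single call per registry; the URL below is a placeholder (8081 is Confluent's default registry port).

val all = SchemaRegistryLookUp.getAllSubjectAndSchema("http://localhost:8081")
all.foreach { case (subject, (latestSchema, versions)) =>
  println(s"$subject: ${versions.size} version(s); latest schema has ${latestSchema.length} chars")
}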
Example 21
Source File: AvroDeserializerConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.deserializers.generic.conf

import scala.collection.immutable.Map
import scala.collection.mutable
import scala.language.implicitConversions

import com.paypal.gimel.serde.common.schema.SchemaRegistryLookUp


class AvroDeserializerConfiguration(val props: Map[String, Any]) extends Serializable {

  val avroSchemaSource: String = props.getOrElse(GenericDeserializerConfigs.avroSchemaSourceKey, props.getOrElse(GenericDeserializerConfigs.avroSchemaSourceKafka1, "INLINE")).toString
  val avroSchemaURL: String = props.getOrElse(GenericDeserializerConfigs.avroSchemaSourceUrlKey, props.getOrElse(GenericDeserializerConfigs.avroSchemaSourceUrlKafka1, GenericDeserializerConstants.confluentSchemaURL)).toString
  val avroSchemaSubject: String = props.getOrElse(GenericDeserializerConfigs.avroSchemaSubjectKey, props.getOrElse(GenericDeserializerConfigs.avroSchemaSourceKeyKafka1, "")).toString
  val (avroSchemaLatest, avroSchemasSubject) =
  avroSchemaSource.toUpperCase() match {
    case GenericDeserializerConstants.avroSchemaCSR =>
      if (avroSchemaSubject.isEmpty) {
        throw new IllegalArgumentException (
          s"""
             | You need to provide schema subject with schema source ${GenericDeserializerConstants.avroSchemaCSR}.
             | Please set ${GenericDeserializerConfigs.avroSchemaSubjectKey} property.
           """.stripMargin)
      }
      val allSchemasSubject: (String, mutable.Map[Int, String]) =
        SchemaRegistryLookUp.getAllSchemasForSubject(avroSchemaSubject, avroSchemaURL)
      (allSchemasSubject._1, allSchemasSubject._2)
    case GenericDeserializerConstants.avroSchemaInline =>
      val avroSchemaString = props.getOrElse(GenericDeserializerConfigs.avroSchemaStringKey, props.getOrElse(GenericDeserializerConfigs.avroSchemaStringKeyKafka1, "")).toString
      if (avroSchemaString.isEmpty) {
        throw new IllegalArgumentException (
          s"""
             | You need to provide avro schema string with schema source ${GenericDeserializerConstants.avroSchemaInline}.
             | Please set ${GenericDeserializerConfigs.avroSchemaStringKey} property.
           """.stripMargin)
      }
      (avroSchemaString, None)
    case _ =>
      throw new IllegalArgumentException (
        s"""
           | Unknown value of Schema Source --> $avroSchemaSource.
           | Please set ${GenericDeserializerConfigs.avroSchemaSourceKey} property with either "${GenericDeserializerConstants.avroSchemaInline}" or "${GenericDeserializerConstants.avroSchemaCSR}" (Confluent Schema Registry).
           """.stripMargin)
  }
} 
Example 22
Source File: AvroSerializerConfiguration.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.serializers.generic.conf

import scala.collection.immutable.Map
import scala.collection.mutable
import scala.language.implicitConversions

import com.paypal.gimel.serde.common.schema.SchemaRegistryLookUp


class AvroSerializerConfiguration(val props: Map[String, Any]) extends Serializable {

  val avroSchemaSource: String = props.getOrElse(GenericSerializerConfigs.avroSchemaSourceKey, props.getOrElse(GenericSerializerConfigs.avroSchemaSourceKafka1, "INLINE")).toString
  val avroSchemaURL: String = props.getOrElse(GenericSerializerConfigs.avroSchemaSourceUrlKey, props.getOrElse(GenericSerializerConfigs.avroSchemaSourceUrlKafka1, GenericSerializerConstants.confluentSchemaURL)).toString
  val avroSchemaSubject: String = props.getOrElse(GenericSerializerConfigs.avroSchemaSubjectKey, props.getOrElse(GenericSerializerConfigs.avroSchemaSourceKeyKafka1, "")).toString
  val (avroSchemaLatest, avroSchemasSubject) =
  avroSchemaSource.toUpperCase() match {
    case GenericSerializerConstants.avroSchemaCSR =>
      if (avroSchemaSubject.isEmpty) {
        throw new IllegalArgumentException (
          s"""
             | You need to provide schema subject with schema source ${GenericSerializerConstants.avroSchemaCSR}.
             | Please set ${GenericSerializerConfigs.avroSchemaSubjectKey} property.
           """.stripMargin)
      }
      val allSchemasSubject: (String, mutable.Map[Int, String]) =
        SchemaRegistryLookUp.getAllSchemasForSubject(avroSchemaSubject, avroSchemaURL)
      (allSchemasSubject._1, allSchemasSubject._2)
    case GenericSerializerConstants.avroSchemaInline =>
      val avroSchemaString = props.getOrElse(GenericSerializerConfigs.avroSchemaStringKey, props.getOrElse(GenericSerializerConfigs.avroSchemaStringKeyKafka1, "")).toString
      if (avroSchemaString.isEmpty) {
        throw new IllegalArgumentException (
          s"""
             | You need to provide avro schema string with schema source ${GenericSerializerConstants.avroSchemaInline}.
             | Please set ${GenericSerializerConfigs.avroSchemaStringKey} property.
           """.stripMargin)
      }
      (avroSchemaString, None)
    case _ =>
      throw new IllegalArgumentException (
        s"""
           | Unknown value of Schema Source --> $avroSchemaSource.
           | Please set ${GenericSerializerConfigs.avroSchemaSourceKey} property with either "${GenericSerializerConstants.avroSchemaInline}" or "${GenericSerializerConstants.avroSchemaCSR}" (Confluent Schema Registry).
           """.stripMargin)
  }
} 
Example 23
Source File: SQLDataTypesUtils.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.serde.common.utils

import scala.collection.immutable.Map

import FieldsJsonProtocol._
import org.apache.spark.sql.types._
import spray.json._

object SQLDataTypesUtils {

  private val FIXED_DECIMAL = """decimal\(\s*(\d+)\s*,\s*(\-?\d+)\s*\)""".r

  
  def getSchemaFromBindToFieldsJson(fieldsBindToJSONString: String): StructType = {
    val fieldsBindTo = fieldsBindToJSONString.parseJson.convertTo[Array[Field]]
    val fieldNamesAndTypes = fieldsBindTo.map(x => (x.fieldName, x.fieldType)).toMap
    val fieldsToSQLDataTypeMap = getFieldNameSQLDataTypes(fieldNamesAndTypes)
    val schemaRDD: StructType = StructType(fieldNamesAndTypes.map(x =>
      StructField(x._1, fieldsToSQLDataTypeMap.getOrElse(x._1, StringType), true)).toArray )
    schemaRDD
  }
} 
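A hypothetical invocation, assuming Field serializes as fieldName/fieldType pairs and that getFieldNameSQLDataTypes (defined elsewhere in gimel) maps "int" and "string" to the corresponding Spark SQL types:

val fieldsJson = """[{"fieldName":"id","fieldType":"int"},{"fieldName":"name","fieldType":"string"}]"""
val schema: StructType = SQLDataTypesUtils.getSchemaFromBindToFieldsJson(fieldsJson)
// likely StructType(StructField("id", IntegerType, true), StructField("name", StringType, true))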
Example 24
Source File: ConfluentSchemaRegistry.scala    From gimel   with Apache License 2.0
package com.paypal.gimel.serde.common.schema

import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.collection.mutable
import scala.language.implicitConversions

import io.confluent.kafka.schemaregistry.client.rest.RestService


class ConfluentSchemaRegistry(server: String) extends RestService(server)

object SchemaRegistryLookUp {

  def getAllSubjectAndSchema(schemaURL: String): Map[String, (String, mutable.Map[Int, String])] = {

    try {
      val schemaRegistryClient = new ConfluentSchemaRegistry(schemaURL)
      val allSubjects = schemaRegistryClient.getAllSubjects.asScala
      val allSchemas = allSubjects.map {
        eachSubject =>
          val eachSubjectSchemas = getAllSchemasForSubject(eachSubject, schemaURL)
          (eachSubject, eachSubjectSchemas)
      }.toMap
      allSchemas
    }
    catch {
      case ex: Throwable =>
        ex.printStackTrace()
        throw ex
    }
  }

  def getAllSchemasForSubject(schemaSubject: String, avroSchemaURL: String): (String, mutable.Map[Int, String]) = {

    try {
      val schemaLookup: scala.collection.mutable.Map[Int, String] = scala.collection.mutable.Map()
      val schemaRegistryClient = new ConfluentSchemaRegistry(avroSchemaURL)
      val k = schemaRegistryClient.getAllVersions(schemaSubject).asScala
      val k2 = k.map { eachVersion =>
        val version = eachVersion.toString.toInt
        version -> schemaRegistryClient.getVersion(schemaSubject, version).getSchema
      }.toMap
      k2.foreach(entry => schemaLookup.put(entry._1, entry._2))
      val latestSchema = schemaRegistryClient.getLatestVersion(schemaSubject).getSchema
      (latestSchema, schemaLookup)
    }
    catch {
      case ex: Throwable =>
        ex.printStackTrace()
        throw ex
    }
  }

} 
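A usage sketch against a hypothetical registry endpoint (URL and subject are placeholders):

val (latestSchema, schemasByVersion) =
  SchemaRegistryLookUp.getAllSchemasForSubject("user-events-value", "http://localhost:8081")
// latestSchema is the newest schema string; schemasByVersion maps each version number to its schema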
Example 25
Source File: DerivingConfig.scala    From scalaz-deriving   with GNU Lesser General Public License v3.0
// Copyright: 2017 - 2020 Sam Halliday
// License: http://www.gnu.org/licenses/lgpl-3.0.en.html

package scalaz.macros

import java.net.URL

import scala.Predef.{ wrapRefArray, ArrowAssoc }
import scala.collection.immutable.{ List, Map }
import scala.collection.JavaConverters._

private[scalaz] final case class DerivingConfig(targets: Map[String, String])
private[scalaz] object DerivingConfig {
  private type Result[T] = Either[String, T]
  private type Stringy   = Map[String, String]

  // cached to avoid hitting disk on every use of the macro.
  private[scalaz] lazy val targets: Result[Stringy] =
    getClass.getClassLoader
      .getResources("deriving.conf")
      .asScala
      .toList
      .map { res =>
        for {
          s <- readResource(res)
          c <- parseProperties(s)
        } yield c
      }
      .reverse // map addition means the last element wins
      .fold(EmptyResults) {
        // it's almost like we have a Monoid! Except, no, it's stdlib
        case (Right(m1), Right(m2)) => Right(m1 ++ m2)
        case (Left(e1), _)          => Left(e1)
        case (_, Left(e2))          => Left(e2)
      }
  private[this] val EmptyResults: Result[Stringy]   = Right(Map.empty)

  private[this] def parseProperties(config: String): Result[Stringy] =
    try Right(
      config
        .split("\n")
        .toList
        .filterNot(_.isEmpty)
        .filterNot(_.startsWith("#"))
        .map(_.split("=").toList)
        .map {
          case List(from, to) => from.trim -> to.trim
          case other          =>
            // I'd have used Left with traverse, but this is stdlib...
            throw new IllegalArgumentException(
              s"expected 2 parts but got ${other.size} in $other"
            )
        }
        .toMap
    )
    catch {
      case t: Throwable =>
        Left(t.getMessage)
    }

  private[this] def readResource(resUrl: URL): Either[String, String] =
    readInputStream(resUrl.openStream())

  private[this] def readInputStream(
    is: java.io.InputStream
  ): Either[String, String] =
    try {
      val baos     = new java.io.ByteArrayOutputStream()
      val data     = Array.ofDim[Byte](2048)
      var len: Int = 0
      def read(): Int = { len = is.read(data); len }
      while (read != -1)
        baos.write(data, 0, len)
      Right(baos.toString("UTF-8"))
    } catch {
      case t: Throwable => Left(t.getMessage)
    } finally is.close()

} 
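An illustrative deriving.conf resource in the shape parseProperties accepts: lines starting with # and blank lines are skipped, and every remaining line must split on "=" into exactly two parts (the typeclass/deriver pairs below are made up):

# typeclass FQN = deriver FQN
scalaz.Show=myapp.DerivedShow
play.api.libs.json.Format=myapp.DerivedFormat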
Example 26
Source File: Interpreter.scala    From lift   with MIT License
package ir.interpreter

import ir.ast._
import scala.collection.immutable.Map

object Interpreter {

  type ValueMap = scala.collection.immutable.Map[Param, Any]

  def apply(f: Lambda) = new {

    private def argsArrayToVector(argsWithArrays: Seq[Any]): Seq[Any] =
      argsWithArrays.map(arrayToVector)

    private def arrayToVector(arg: Any): Any = {
      arg match {
        case a: Array[_] => a.map(arrayToVector).toVector : Vector[_]
        case a: Any => a
      }
    }

    def ->[R] = new {
      def run(args: Any*): R = {
        val res = f.eval(Map[Param, Any](), argsArrayToVector(args): _*).asInstanceOf[R]
        res
      }

      def runAndFlatten(args: Any*)(implicit can: CanFlatten[R]): Seq[can.Elem] = {
        can.flatten(run(args:_*))
      }
    }
  }

  sealed trait InnerMost {
    implicit def innerSeq[A]: CanFlatten[Vector[A]] { type Elem = A } =
      new CanFlatten[Vector[A]] {
        type Elem = A
        def flatten(v: Vector[A]): Vector[A] = v
      }
  }
  object CanFlatten extends InnerMost {
    implicit def nestedI[A](implicit inner: CanFlatten[A])
    : CanFlatten[Vector[A]] { type Elem = inner.Elem } =
      new CanFlatten[Vector[A]] {
        type Elem = inner.Elem
        def flatten(v: Vector[A]) : Vector[inner.Elem] = v.flatMap(vi => inner.flatten(vi))
      }
  }
  sealed trait CanFlatten[-A] {
    type Elem
    def flatten(v: A): Vector[Elem]
  }

} 
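A usage sketch, where f is a hypothetical lift Lambda over one array argument (the anonymous result is a structural type, so scala.language.reflectiveCalls may be needed at the call site):

val result: Vector[Int] = Interpreter(f).->[Vector[Int]].run(Array(1, 2, 3))
// runAndFlatten also flattens nested Vectors via the CanFlatten evidence:
val flat: Seq[Int] = Interpreter(f).->[Vector[Vector[Int]]].runAndFlatten(Array(1, 2, 3))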
Example 27
Source File: ElevationOverlay.scala    From geotrellis-osm-elevation   with Apache License 2.0
package geotrellis.osme.core

import java.io.{BufferedWriter, FileWriter, File}

import com.vividsolutions.jts.geom.{LineString, MultiLineString}
import geotrellis.raster.io.geotiff.SinglebandGeoTiff
import geotrellis.vector.io.json.{GeoJson, JsonFeatureCollection}
import scala.collection.immutable.Map
import spray.json._
import DefaultJsonProtocol._
import geotrellis.vector.io.json.FeatureFormats.writeFeatureJson
import geotrellis.vector.io.json.GeometryFormats._
import geotrellis.vector.densify.DensifyMethods
import geotrellis.vector.dissolve.DissolveMethods
import geotrellis.vector._



    val segmentsFeatures = segments.map { segment =>
       val center = segment.centroid match {
         case PointResult(p) => p
         case NoResult => throw new Exception("No result found in PointOrNoResult")
       }
       val (col, row) = rasterExtent.mapToGrid(center)
       val elevation = geotiff.tile.getDouble(col, row)
       val meanvMap: Map[String, Double] = Map("MEANV" -> elevation)
       LineFeature(segment, meanvMap)
     }

    return segmentsFeatures.toTraversable


  }
} 
Example 28
Source File: PlatformStatistics.scala    From coral   with Apache License 2.0
package io.coral.cluster

import io.coral.actors.Counter
import scala.collection.immutable.Map


case class PlatformStatistics(// The total number of runtimes on the platform
							  totalRuntimes: Int,
							  // The total number of actors in the system
							  totalActors: Int,
							  // The start time of the platform
							  runningSince: Long,
							  // The total number of messages processed
							  totalMessages: Long,
							  // The total number of exceptions thrown in all runtimes
							  nrExceptions: Long,
							  // A list of all machines of the platform.
							  members: List[Machine],
							  // ... other monitoring metrics here
							  // Any runtime-specific monitoring metrics in this map
							  children: Map[String, Counter]) 
Example 29
Source File: ConcordanceParser.scala    From Scalaprof   with GNU General Public License v2.0
package com.phasmid.concordance

import scala.util.parsing.combinator._
import scala.util.parsing.input.Positional
import scala.io.Source
import scala.collection.immutable.Map


class ConcordanceParser extends RegexParsers {
  val rWord = """[\w’]+[,;\.\-\?\!\—]?""".r
  def word: Parser[PositionalString] = positioned(regex(rWord) ^^ {w => PositionalString(w)})
  def sentence: Parser[Seq[PositionalString]] = rep(word)
}

case class PositionalString(s: String) extends Positional

object ConcordanceParser {
 
  def main(args: Array[String]): Unit = {
    val docs = for (f <- args) yield Source.fromFile(f).mkString
    val concordance = for (i <- 0 to docs.length-1) yield (args(i),parseDoc(docs(i)))
    println(concordance)
    // an alternative way of looking at the data (gives doc, page, line and char numbers with each string)
    val q = for {(d,xxxx) <- concordance; (p,xxx) <- xxxx; (l,xx) <- xxx; (_,c,x) <- xx} yield (d, p,l,c,x)
    println(q)
    // yet another way to look at the data
    val concordanceMap = concordance.toMap
    println(concordanceMap)
  }
  
  def parseDoc(content: String) = {
    val pages = for (p <- content.split("/p")) yield p
    for (i <- 0 to pages.length-1) yield (i+1,parsePage(pages(i)))
  }

  def parsePage(content: String) = {
    val lines = for (l <- content.split("\n")) yield l
    for (i <- 0 to lines.length-1) yield (i+1,parseLine(lines(i)))
  }

  def parseLine(line: String): Seq[(Int,Int,String)] = {
    def tidy(s: String) = s.replaceAll("""[,;\.\-\?\!\—]""", "")
    val p = new ConcordanceParser
    val r = p.parseAll(p.sentence,line) match {
      case p.Success(ws,_) => ws
      case p.Failure(e,_) => println(e); List()
      case _ => println("PositionalParser: logic error"); List()
    }
    r map {case p @ PositionalString(s) => (p.pos.line,p.pos.column,tidy(s).toLowerCase)}
  }
} 
Example 30
Source File: RddToDataFrame.scala    From spark-sframe   with BSD 2-Clause "Simplified" License
package org.apache.spark.turi

import org.graphlab.create.GraphLabUtil
import org.apache.spark.sql.{SQLContext, Row, DataFrame}
import org.apache.spark.rdd.RDD
import scala.collection.JavaConversions._
import org.apache.spark.sql.types._
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.ArrayBuffer
import scala.collection.immutable.Map
import java.util.HashMap
import java.util.ArrayList
import java.util.{Date,GregorianCalendar}
import java.sql.Date

object EvaluateRDD {
  
  def inferSchema(obj: Any): DataType = {
    if(obj.isInstanceOf[Int]) { 
      IntegerType
    } else if(obj.isInstanceOf[String]) { 
      StringType
    } else if(obj.isInstanceOf[Double]) { 
      DoubleType
    } else if(obj.isInstanceOf[Long]) { 
      LongType
    } else if(obj.isInstanceOf[Float]) { 
      FloatType
    } else if(obj.isInstanceOf[Map[_,_]]) {
      MapType(inferSchema(obj.asInstanceOf[Map[_,_]].head._1),inferSchema(obj.asInstanceOf[Map[_,_]].head._2))
    } else if(obj.isInstanceOf[java.util.HashMap[_,_]]) {
      MapType(inferSchema(obj.asInstanceOf[java.util.HashMap[_,_]].head._1),inferSchema(obj.asInstanceOf[java.util.HashMap[_,_]].head._2))
    } else if(obj.isInstanceOf[Array[_]]) {
      ArrayType(inferSchema(obj.asInstanceOf[Array[_]](0)))
    } else if(obj.isInstanceOf[java.util.ArrayList[_]]) {
      ArrayType(inferSchema(obj.asInstanceOf[java.util.ArrayList[_]](0)))
    } else if(obj.isInstanceOf[java.util.GregorianCalendar]) {
      TimestampType
    } else if(obj.isInstanceOf[java.util.Date] || obj.isInstanceOf[java.sql.Date]) {
      DateType
    } else { 
      StringType
    }
  }

  def toScala(obj: Any): Any = {
    if (obj.isInstanceOf[java.util.HashMap[_,_]]) {
      val jmap = obj.asInstanceOf[java.util.HashMap[_,_]]
      jmap.map { case (k,v) => toScala(k) -> toScala(v) }.toMap
    }
    else if(obj.isInstanceOf[java.util.ArrayList[_]]) {
      val buf = ArrayBuffer[Any]()
      val jArray = obj.asInstanceOf[java.util.ArrayList[_]]
      for(item <- jArray) {
        buf += toScala(item)
      }
      buf.toArray
    } else if(obj.isInstanceOf[java.util.GregorianCalendar]) {
      new java.sql.Timestamp(obj.asInstanceOf[java.util.GregorianCalendar].getTime().getTime())
    } else {
      obj
    }
  }
  def toSparkDataFrame(sqlContext: SQLContext, rdd: RDD[java.util.HashMap[String,_]]): DataFrame = { 
    val scalaRDD = rdd.map(l => toScala(l))
    val rowRDD = scalaRDD.map(l => Row.fromSeq(l.asInstanceOf[Map[_,_]].values.toList))
    
    var sample_data: java.util.HashMap[String,_] = rdd.take(1)(0)
    
    var schema_list: ListBuffer[StructField] = new ListBuffer[StructField]()
    for ((name,v) <- sample_data) { 
      schema_list.append(StructField(name,inferSchema(v)))
    }
    sqlContext.createDataFrame(rowRDD,StructType(schema_list))
  }
} 
Example 31
Source File: NativeHttpTest.scala    From reactive-cli   with Apache License 2.0
package com.lightbend.rp.reactivecli.http

import scala.collection.immutable.Map
import utest._

object NativeHttpTest extends TestSuite {
  val tests = this{
    "Parse HTTP headers" - {
      // No header field value
      assert(NativeHttp.parseHeaders(Some("HTTP/1.1 200 OK\r\nAccept:")) == Map(
        "Accept" -> ""))

      // Real-world punctuation
      assert(NativeHttp.parseHeaders(Some(
        """HTTP/1.1 200 OK
          |Date: Mon, 07 May 2018 08:43:13 GMT
          |Expires: -1
          |Cache-Control: private, max-age=0
          |Content-Type: text/html; charset=ISO-8859-1""".stripMargin.replaceAll("\n", "\r\n"))) == Map(
        "Date" -> "Mon, 07 May 2018 08:43:13 GMT",
        "Expires" -> "-1",
        "Cache-Control" -> "private, max-age=0",
        "Content-Type" -> "text/html; charset=ISO-8859-1"))

      // Multiline field values
      assert(NativeHttp.parseHeaders(Some(
        """HTTP/1.1 200 OK
          |Date: Mon, 07 May 2018
          | 08:43:13 GMT
          |Expires: -1
          |Cache-Control: private,
          |               max-age=0
          |Content-Type: text/html; charset=ISO-8859-1""".stripMargin.replaceAll("\n", "\r\n"))) == Map(
        "Date" -> "Mon, 07 May 2018 08:43:13 GMT",
        "Expires" -> "-1",
        "Cache-Control" -> "private, max-age=0",
        "Content-Type" -> "text/html; charset=ISO-8859-1"))
    }

    "Parse headers not following HTTP spec" - {
      // Empty lines before and after HTTP status
      assert(NativeHttp.parseHeaders(Some("\r\n\r\n\r\nHTTP/1.1 200 OK\r\n \r\nAccept:\r\n")) == Map(
        "Accept" -> ""))

      // No colon separator
      assert(NativeHttp.parseHeaders(Some(
        """HTTP/1.1 200 OK
          |Accept: *
          |Date Mon, 07 May 2018 08:43:13 GMT
          |Expires: -1""".stripMargin.replaceAll("\n", "\r\n"))) == Map(
        "Accept" -> "*",
        "Expires" -> "-1"))
    }
  }
} 
Example 32
Source File: CoproductStrategyTests.scala    From case-classy   with Apache License 2.0
package classy
package generic
package derive

import predef._
import scala.Predef.ArrowAssoc

import org.scalacheck._
import org.scalacheck.Prop._

import scala.collection.immutable.Map
import scala.reflect.ClassTag

class CoproductStrategyTests extends Properties("CoproductStrategy") {
  import DecodeError._

  implicit def readMap[A](implicit evA: ClassTag[A]): Read[Map[String, Any], A] = Read.instance(
    path => Decoder.instance(_.get(path) match {
      case Some(evA(a)) => a.right
      case _            => AtPath(path, Missing).left
    }))

  case class Entry[A, B](
    input: A,
    decoder: Decoder[A, B],
    name: String,
    output: Either[DecodeError, B]
  )

  val nestedEntries: List[Entry[Map[String, Any], String]] = List(
    Entry(
      Map("foo" -> Map("foo" -> "bar")),
      readMap[String].apply("foo"),
      "Foo",
      "bar".right
    ),
    Entry(
      Map.empty,
      readMap[String].apply("foo"),
      "Foo",
      AtPath("foo", Missing).left
    ),
    Entry(
      Map("foo" -> Map.empty),
      readMap[String].apply("foo"),
      "Foo",
      AtPath("foo", AtPath("foo", Missing)).left
    )
  )

  val nested = CoproductStrategy.Nested()

  property("Nested") =
    nestedEntries
      .map(entry => nested.decoder(entry.decoder, entry.name).apply(entry.input) ?= entry.output)
      .reduce(_ && _)

  val typedEntries: List[Entry[Map[String, Any], String]] = List(
    Entry(
      Map("type" -> "foo", "foo" -> "bar"),
      readMap[String].apply("foo"),
      "Foo",
      "bar".right
    ),
    Entry(
      Map("type" -> "foo"),
      readMap[String].apply("foo"),
      "Foo",
      AtPath("foo", Missing).left
    )
  )

  val typed = CoproductStrategy.Typed()

  property("Typed") =
    typedEntries
      .map(entry => typed.decoder(entry.decoder, entry.name).apply(entry.input) ?= entry.output)
      .reduce(_ && _)

} 
Example 33
Source File: MapKOps.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf.data

import scala.language.higherKinds
import scala.collection.GenTraversableOnce
import scala.collection.immutable.{Map, MapLike}


trait MapKOps[K, +V, +This[+TV] <: Map[K, TV] with MapKOps[K, TV, This]]
    extends MapLike[K, V, This[V]] { this: This[V] =>
  override def updated[V1 >: V](key: K, value: V1): This[V1] = this + ((key, value))
  override def +[V1 >: V](kv: (K, V1)): This[V1]
  override def +[V1 >: V](elem1: (K, V1), elem2: (K, V1), elems: (K, V1)*): This[V1] =
    this + elem1 + elem2 ++ elems
  override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): This[V1] =
    xs.seq.foldLeft(this: This[V1])(_ + _)
} 
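A minimal, hypothetical carrier type showing the intended use (reusing the imports above): implement the binary + once, and the trait derives updated, the vararg + and ++ from it. MapLike additionally demands get, iterator, - and empty:

final class StringKeyMap[+V](underlying: Map[String, V])
    extends Map[String, V] with MapKOps[String, V, StringKeyMap] {
  override def get(key: String): Option[V] = underlying.get(key)
  override def iterator: Iterator[(String, V)] = underlying.iterator
  override def +[V1 >: V](kv: (String, V1)): StringKeyMap[V1] =
    new StringKeyMap(underlying + kv)
  override def -(key: String): StringKeyMap[V] = new StringKeyMap(underlying - key)
  override def empty: StringKeyMap[V] = new StringKeyMap(Map.empty)
}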
Example 34
Source File: Interface.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf
package iface

import java.{util => j}

import com.daml.lf.data.ImmArray.ImmArraySeq
import com.daml.lf.data.Ref.{PackageId, QualifiedName}
import com.daml.lf.iface.reader.Errors
import com.daml.daml_lf_dev.DamlLf

import scala.collection.JavaConverters._
import scala.collection.immutable.Map

sealed abstract class InterfaceType extends Product with Serializable {
  def `type`: DefDataType.FWT

  def fold[Z](normal: DefDataType.FWT => Z, template: (Record.FWT, DefTemplate[Type]) => Z): Z =
    this match {
      case InterfaceType.Normal(typ) => normal(typ)
      case InterfaceType.Template(typ, tpl) => template(typ, tpl)
    }
}

// The InterfaceType companion object (defining the Normal and Template cases
// matched in fold above) was elided by the examples site along with the
// scaladoc; a minimal reconstruction consistent with fold's signature:
object InterfaceType {
  final case class Normal(`type`: DefDataType.FWT) extends InterfaceType
  final case class Template(rec: Record.FWT, template: DefTemplate[Type])
      extends InterfaceType {
    def `type`: DefDataType.FWT = DefDataType(ImmArraySeq.empty, rec)
  }
}

final case class Interface(packageId: PackageId, typeDecls: Map[QualifiedName, InterfaceType]) {
  def getTypeDecls: j.Map[QualifiedName, InterfaceType] = typeDecls.asJava
}

object Interface {
  import Errors._
  import reader.InterfaceReader._

  def read(lf: DamlLf.Archive): (Errors[ErrorLoc, InvalidDataTypeDefinition], Interface) =
    readInterface(lf)

  def read(lf: (PackageId, DamlLf.ArchivePayload))
    : (Errors[ErrorLoc, InvalidDataTypeDefinition], Interface) =
    readInterface(lf)

} 
Example 35
Source File: EnvironmentInterface.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.lf
package iface

import com.daml.lf.archive.Dar
import data.Ref.Identifier

import scala.collection.immutable.Map
import scalaz.syntax.std.map._
import scalaz.Semigroup


final case class EnvironmentInterface(typeDecls: Map[Identifier, InterfaceType])

object EnvironmentInterface {
  def fromReaderInterfaces(i: Interface, o: Interface*): EnvironmentInterface =
    EnvironmentInterface((i +: o).iterator.flatMap {
      case Interface(packageId, typeDecls) =>
        typeDecls mapKeys (Identifier(packageId, _))
    }.toMap)

  def fromReaderInterfaces(dar: Dar[Interface]): EnvironmentInterface =
    fromReaderInterfaces(dar.main, dar.dependencies: _*)

  implicit val environmentInterfaceSemigroup: Semigroup[EnvironmentInterface] = Semigroup instance {
    (f1, f2) =>
      EnvironmentInterface(f1.typeDecls ++ f2.typeDecls)
  }
} 
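Combining stores through the Semigroup instance (ifaceA and ifaceB are placeholder Interface values):

import scalaz.syntax.semigroup._
val merged: EnvironmentInterface =
  EnvironmentInterface.fromReaderInterfaces(ifaceA) |+| EnvironmentInterface.fromReaderInterfaces(ifaceB)
// right-biased on key collisions, since Map ++ keeps entries from the right operand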
Example 36
Source File: EventDecoderApi.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.client.binding

import com.daml.ledger.api.refinements.ApiTypes

import scala.collection.immutable.{Map, Seq}
import scalaz.Id.Id
import com.daml.ledger.api.v1.{event => rpcevent, value => rpcvalue}

abstract class EventDecoderApi(val templateTypes: Seq[TemplateCompanion[_]]) {

  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  val decoderTable: Map[ApiTypes.TemplateId, rpcevent.CreatedEvent => Option[Template[_]]] =
    templateTypes.map(_.decoderEntry).toMap

  private[this] val dtl = {
    type F[A] = A => Option[rpcevent.CreatedEvent => Option[Template[_]]]
    ApiTypes.TemplateId.unsubst[F, rpcvalue.Identifier](decoderTable.lift)
  }

  @SuppressWarnings(Array("org.wartremover.warts.Any"))
  final def createdEventToContractRef(
      createdEvent: rpcevent.CreatedEvent): Either[EventDecoderError, Contract.OfAny] = {
    for {
      templateToContract <- createdEvent.templateId flatMap dtl toRight DecoderTableLookupFailure
      tadt <- templateToContract(createdEvent).toRight(
        CreateEventToContractMappingError: EventDecoderError)
    } yield
      Contract(
        Primitive.substContractId[Id, Nothing](ApiTypes.ContractId(createdEvent.contractId)),
        tadt,
        createdEvent.agreementText,
        createdEvent.signatories,
        createdEvent.observers,
        createdEvent.contractKey
      )
  }
} 
Example 37
Source File: Portfolio.scala    From matcher   with MIT License
package com.wavesplatform.dex.domain.state

import cats.kernel.instances.map._
import cats.{Monoid, Semigroup}
import com.wavesplatform.dex.domain.asset.Asset
import com.wavesplatform.dex.domain.asset.Asset.{IssuedAsset, Waves}

import scala.collection.immutable.Map

case class Portfolio(balance: Long, lease: LeaseBalance, assets: Map[IssuedAsset, Long]) {

  lazy val spendableBalance: Long = balance - lease.out

  def balanceOf(assetId: Asset): Long = assetId match {
    case Waves                  => balance
    case asset @ IssuedAsset(_) => assets.getOrElse(asset, 0L)
  }
}

object Portfolio {

  val empty: Portfolio = Portfolio(0L, Monoid[LeaseBalance].empty, Map.empty)

  implicit val longSemigroup: Semigroup[Long] = (x: Long, y: Long) => safeSum(x, y)

  implicit val monoid: Monoid[Portfolio] = new Monoid[Portfolio] {
    override val empty: Portfolio = Portfolio.empty
    override def combine(older: Portfolio, newer: Portfolio): Portfolio = Portfolio(
      balance = safeSum(older.balance, newer.balance),
      lease = Monoid.combine(older.lease, newer.lease),
      assets = Monoid.combine(older.assets, newer.assets)
    )
  }

  implicit class PortfolioExt(self: Portfolio) {

    def spendableBalanceOf(assetId: Asset): Long = assetId.fold(self.spendableBalance)(self.assets.getOrElse(_, 0L))

    def assetIds: Set[Asset] = self.assets.keySet ++ Set(Waves)

    def changedAssetIds(that: Portfolio): Set[Asset] = {
      val a1 = assetIds
      val a2 = that.assetIds

      val intersection = a1 & a2
      val sureChanged  = (a1 | a2) -- intersection

      intersection.filter(x => spendableBalanceOf(x) != that.spendableBalanceOf(x)) ++ sureChanged
    }
  }
} 
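A small combine sketch; with no issued assets the result simply sums the Waves balances:

val a = Portfolio(100L, Monoid[LeaseBalance].empty, Map.empty)
val b = Portfolio(50L, Monoid[LeaseBalance].empty, Map.empty)
val c = Monoid.combine(a, b) // c.balance == 150; assets are merged per key via safeSum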
Example 38
Source File: IterableDecoratorTest.scala    From scala-collection-contrib   with Apache License 2.0
package scala.collection
package decorators

import org.junit.{Assert, Test}

import scala.collection.immutable.{LazyList, List, Map, Range}

class IterableDecoratorTest {

  @Test
  def foldSomeLeft(): Unit = {
      val r = Range(0, 100)
      Assert.assertEquals(0, r.foldSomeLeft(0)((x, y) => None))
      Assert.assertEquals(10, r.foldSomeLeft(0)((x, y) => if (y > 10) None else Some(y)))
      Assert.assertEquals(55, r.foldSomeLeft(0)((x, y) => if (y > 10) None else Some(x + y)))
      Assert.assertEquals(4950, r.foldSomeLeft(0)((x, y) => Some(x + y)))

      Assert.assertEquals(10, List[Int]().foldSomeLeft(10)((x, y) => Some(x + y)))
    }

  @Test
  def lazyFoldLeftIsStackSafe(): Unit = {
    val bigList = List.range(1, 50000)
    def sum(as: Iterable[Int]): Int =
      as.lazyFoldLeft(0)(_ + _)

    Assert.assertEquals(sum(bigList), 1249975000)
  }

  @Test
  def lazyFoldLeftIsLazy(): Unit = {
    val nats = LazyList.from(0)
    def exists[A](as: Iterable[A])(f: A => Boolean): Boolean =
      as.lazyFoldLeft(false)(_ || f(_))
    
    Assert.assertTrue(exists(nats)(_ > 100000))
  }

  @Test def lazyFoldRightIsLazy(): Unit = {
    val xs = LazyList.from(0)
    def chooseOne(x: Int): Either[Int, Int => Int]= if (x < (1 << 16)) Right(identity) else Left(x)

    Assert.assertEquals(1 << 16, xs.lazyFoldRight(0)(chooseOne))
  }

  @Test
  def hasIterableOpsWorksWithStringAndMap(): Unit = {
    val result = "foo".foldSomeLeft(0) { case (_, 'o') => None case (n, _) => Some(n + 1) }
    Assert.assertEquals(1, result)

    val result2 =
      Map(1 -> "foo", 2 -> "bar").foldSomeLeft(0) {
        case (n, (k, _)) => if (k == -1) None else Some(n + 1)
      }
    Assert.assertEquals(2, result2)
  }

  @Test
  def splitByShouldHonorEmptyIterator(): Unit = {
    val split = Vector.empty[Int].splitBy(identity)
    Assert.assertEquals(Vector.empty, split)
  }

  @Test
  def splitByShouldReturnSingleSeqWhenSingleElement(): Unit = {
    val value = Vector("1")
    val split = value.splitBy(identity)
    Assert.assertEquals(Vector(value), split)
  }

  @Test
  def splitByShouldReturnSingleSeqWhenAllElHaveTheSameKey(): Unit = {
    val value = Vector("1", "1", "1")
    val split = value.splitBy(identity)
    Assert.assertEquals(Vector(value), split)
  }

  @Test
  def splitByShouldReturnVectorOfVectorOrConsecutiveElementsWithTheSameKey(): Unit = {
    val value = Vector("1", "2", "2", "3", "3", "3", "2", "2")
    val split: Vector[Vector[String]] = value.splitBy(identity)
    Assert.assertEquals(Vector(Vector("1"), Vector("2", "2"), Vector("3", "3", "3"), Vector("2", "2")), split)
  }

  @Test
  def splitByShouldReturnListOfListOfConsecutiveElementsWithTheSameKey(): Unit = {
    val value = List("1", "2", "2", "3", "3", "3", "2", "2")
    val split: List[List[String]] = value.splitBy(identity)
    Assert.assertEquals(List(List("1"), List("2", "2"), List("3", "3", "3"), List("2", "2")), split)
  }

  @Test
  def splitByShouldReturnSetOfSetOfConsecutiveElementsWithTheSameKey(): Unit = {
    val value = Set("1", "2", "2", "3", "3", "3", "2", "2")
    val split: Set[Set[String]] = value.splitBy(identity)
    Assert.assertEquals(Set(Set("1"), Set("2"), Set("3")), split)
  }
} 
Example 39
Source File: KafkaConsumerProxy.scala    From hydra   with Apache License 2.0
package hydra.kafka.consumer

import akka.actor.Actor
import akka.pattern.pipe
import hydra.kafka.consumer.KafkaConsumerProxy._
import hydra.kafka.util.KafkaUtils
import org.apache.kafka.clients.consumer.Consumer
import org.apache.kafka.common.{PartitionInfo, TopicPartition}

import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.concurrent.Future

class KafkaConsumerProxy extends Actor {

  private var _defaultConsumer: Consumer[String, String] = _

  private implicit val ec = context.dispatcher

  override def preStart(): Unit = {
    _defaultConsumer = KafkaUtils.stringConsumerSettings.createKafkaConsumer()
  }

  override def receive: Receive = {
    case GetLatestOffsets(topic) =>
      val requestor = sender
      pipe(latestOffsets(topic).map(LatestOffsetsResponse(topic, _))) to requestor

    case GetPartitionInfo(topic) =>
      val requestor = sender
      pipe(partitionInfo(topic).map(PartitionInfoResponse(topic, _))) to requestor

    case ListTopics =>
      val requestor = sender
      pipe(listTopics().map(ListTopicsResponse(_))) to requestor
  }

  override def postStop(): Unit = {
    _defaultConsumer.close()
  }

  private def latestOffsets(
      topic: String
  ): Future[Map[TopicPartition, Long]] = {
    Future {
      val ts = _defaultConsumer
        .partitionsFor(topic)
        .asScala
        .map(pi => new TopicPartition(topic, pi.partition()))
      _defaultConsumer
        .endOffsets(ts.asJava)
        .asScala
        .map(tp => tp._1 -> tp._2.toLong)
        .toMap
    }
  }

  private def partitionInfo(topic: String): Future[Seq[PartitionInfo]] =
    Future(_defaultConsumer.partitionsFor(topic).asScala)

  private def listTopics(): Future[Map[String, Seq[PartitionInfo]]] = {
    Future(_defaultConsumer.listTopics().asScala.toMap)
      .map(res => res.mapValues(_.asScala.toSeq))
  }

}

object KafkaConsumerProxy {

  case class GetLatestOffsets(topic: String)

  case class LatestOffsetsResponse(
      topic: String,
      offsets: Map[TopicPartition, Long]
  )

  case class GetPartitionInfo(topic: String)

  case class PartitionInfoResponse(
      topic: String,
      partitionInfo: Seq[PartitionInfo]
  )

  case object ListTopics

  case class ListTopicsResponse(topics: Map[String, Seq[PartitionInfo]])

} 
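A hypothetical ask-pattern interaction from a calling actor or test:

import akka.actor.{ActorSystem, Props}
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._

implicit val timeout: Timeout = Timeout(5.seconds)
val system = ActorSystem("kafka-consumers")
val proxy = system.actorOf(Props[KafkaConsumerProxy])
val offsets = (proxy ? KafkaConsumerProxy.GetLatestOffsets("my-topic"))
  .mapTo[KafkaConsumerProxy.LatestOffsetsResponse]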
Example 40
Source File: TopicsEndpoint.scala    From hydra   with Apache License 2.0
package hydra.kafka.endpoints

import akka.actor.ActorSelection
import akka.http.scaladsl.common.EntityStreamingSupport
import akka.kafka.Subscriptions
import akka.kafka.scaladsl.Consumer
import akka.pattern.ask
import akka.util.Timeout
import hydra.core.http.RouteSupport
import hydra.kafka.consumer.KafkaConsumerProxy.{GetLatestOffsets, LatestOffsetsResponse}
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition

import scala.collection.immutable.Map
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext, Future}


class TopicsEndpoint(consumerProxy:ActorSelection)(implicit ec:ExecutionContext) extends RouteSupport {

  import hydra.kafka.util.KafkaUtils._

  implicit val jsonStreamingSupport = EntityStreamingSupport.json()

  override val route =
    path("transports" / "kafka" / "consumer" / "topics" / Segment) {
      topicName =>
        get {
          extractRequestContext { ctx =>
            parameters('format.?, 'group.?, 'n ? 10, 'start ? "earliest") {
              (format, groupId, n, startOffset) =>
                val settings = loadConsumerSettings[Any, Any](
                  format.getOrElse("avro"),
                  groupId.getOrElse("hydra"),
                  startOffset
                )
                val offsets = latestOffsets(topicName)
                val source = Consumer
                  .plainSource(settings, Subscriptions.topics(topicName))
                  .initialTimeout(5.seconds)
                  .zipWithIndex
                  .takeWhile(rec =>
                    rec._2 <= n && !shouldCancel(offsets, rec._1)
                  )
                  .map(rec => rec._1.value().toString)
                  .watchTermination()((_, termination) =>
                    termination.failed.foreach {
                      case cause => ctx.fail(cause)
                    }
                  )
                complete(source)

            }
          }
        }
    }

  def shouldCancel(
      fpartitions: Future[Map[TopicPartition, Long]],
      record: ConsumerRecord[Any, Any]
  ): Boolean = {
    if (fpartitions.isCompleted) {
      val partitions = Await.result(fpartitions, 1.millis)
      val tp = new TopicPartition(record.topic(), record.partition())
      partitions.get(tp) match {
        case Some(offset) => record.offset() >= offset
        case None         => false
      }
    } else {
      false
    }

  }

  private def latestOffsets(
      topic: String
  ): Future[Map[TopicPartition, Long]] = {
    implicit val timeout = Timeout(5 seconds)
    (consumerProxy ? GetLatestOffsets(topic))
      .mapTo[LatestOffsetsResponse]
      .map(_.offsets)
  }

} 
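The route answers requests of the following shape (topic name and parameter values illustrative):

GET /transports/kafka/consumer/topics/my-topic?format=avro&group=hydra&n=5&start=earliest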
Example 41
Source File: Heartbeat.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey.performer

import akka.actor.ActorRef
import org.apache.iota.fey.FeyGenericActor

import scala.collection.immutable.Map
import scala.concurrent.duration._

class Heartbeat(override val params: Map[String, String] = Map.empty,
                override val backoff: FiniteDuration = 1.minutes,
                override val connectTo: Map[String, ActorRef] = Map.empty,
                override val schedulerTimeInterval: FiniteDuration = 30.seconds,
                override val orchestrationName: String = "",
                override val orchestrationID: String = "",
                override val autoScale: Boolean = false) extends FeyGenericActor {

  override def onStart : Unit = {
  }

  override def onStop : Unit=  {
  }

  override def onRestart(reason: Throwable) : Unit = {
    // Called after actor is up and running - after self restart
  }

  override def customReceive: Receive = {
    case x => log.debug(s"Untreated $x")
  }

  override def processMessage[T](message: T, sender: ActorRef): Unit = {
  }

  override def execute() : Unit = {
    log.debug("alive")
    propagateMessage("alive")
  }

} 
Example 42
Source File: RandomDouble.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey.performer

import akka.actor.ActorRef
import org.apache.iota.fey.FeyGenericActor

import scala.collection.immutable.Map
import scala.concurrent.duration._

class RandomDouble(override val params: Map[String, String] = Map.empty,
                   override val backoff: FiniteDuration = 1.minutes,
                   override val connectTo: Map[String, ActorRef] = Map.empty,
                   override val schedulerTimeInterval: FiniteDuration = 30.seconds,
                   override val orchestrationName: String = "",
                   override val orchestrationID: String = "",
                   override val autoScale: Boolean = false) extends FeyGenericActor {

  override def onStart : Unit = {
  }

  override def onStop : Unit = {
  }

  override def onRestart(reason: Throwable) : Unit = {
    // Called after actor is up and running - after self restart
  }

  override def customReceive: Receive = {
    case x => log.debug(s"Untreated $x")
  }

  override def processMessage[T](message: T, sender: ActorRef): Unit = {
  }

  override def execute() : Unit = {
    val rd = scala.util.Random.nextGaussian().toString
    log.debug(rd)
    propagateMessage(rd)
  }

} 
Example 43
Source File: RandomUUID.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey.performer

import akka.actor.ActorRef
import org.apache.iota.fey.FeyGenericActor

import scala.collection.immutable.Map
import scala.concurrent.duration._

class RandomUUID(override val params: Map[String, String] = Map.empty,
                 override val backoff: FiniteDuration = 1.minutes,
                 override val connectTo: Map[String, ActorRef] = Map.empty,
                 override val schedulerTimeInterval: FiniteDuration = 30.seconds,
                 override val orchestrationName: String = "",
                 override val orchestrationID: String = "",
                 override val autoScale: Boolean = false) extends FeyGenericActor {

  override def onStart : Unit = {
  }

  override def onStop : Unit = {
  }

  override def onRestart(reason: Throwable) : Unit = {
    // Called after actor is up and running - after self restart
  }

  override def customReceive: Receive = {
    case x => log.debug(s"Untreated $x")
  }

  override def processMessage[T](message: T, sender: ActorRef): Unit = {
  }

  override def execute() : Unit = {
    val uuid = java.util.UUID.randomUUID.toString
    log.debug(uuid)
    propagateMessage(uuid)
  }

} 
Example 44
Source File: RandomInteger.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey.performer

import akka.actor.ActorRef
import org.apache.iota.fey.FeyGenericActor

import scala.collection.immutable.Map
import scala.concurrent.duration._

class RandomInteger(override val params: Map[String, String] = Map.empty,
                    override val backoff: FiniteDuration = 1.minutes,
                    override val connectTo: Map[String, ActorRef] = Map.empty,
                    override val schedulerTimeInterval: FiniteDuration = 30.seconds,
                    override val orchestrationName: String = "",
                    override val orchestrationID: String = "",
                    override val autoScale: Boolean = false) extends FeyGenericActor {

  override def onStart : Unit = {
  }

  override def onStop : Unit = {
  }

  override def onRestart(reason: Throwable) : Unit = {
    // Called after actor is up and running - after self restart
  }

  override def customReceive: Receive = {
    case x => log.debug(s"Untreated $x")
  }

  override def processMessage[T](message: T, sender: ActorRef): Unit = {
  }

  override def execute() : Unit = {
    val ri = scala.util.Random.nextInt().toString
    log.debug(ri)
    propagateMessage(ri)
  }

} 
Example 45
Source File: Timestamp.scala    From incubator-retired-iota   with Apache License 2.0
package org.apache.iota.fey.performer

import akka.actor.ActorRef
import org.apache.iota.fey.FeyGenericActor

import scala.collection.immutable.Map
import scala.concurrent.duration._

class Timestamp(override val params: Map[String, String] = Map.empty,
                override val backoff: FiniteDuration = 1.minutes,
                override val connectTo: Map[String, ActorRef] = Map.empty,
                override val schedulerTimeInterval: FiniteDuration = 30.seconds,
                override val orchestrationName: String = "",
                override val orchestrationID: String = "",
                override val autoScale: Boolean = false) extends FeyGenericActor {

  override def onStart : Unit = {
  }

  override def onStop : Unit = {
  }

  override def onRestart(reason: Throwable) : Unit = {
    // Called after actor is up and running - after self restart
  }

  override def customReceive: Receive = {
    case x => log.debug(s"Untreated $x")
  }

  override def processMessage[T](message: T, sender: ActorRef): Unit = {
  }

  override def execute() : Unit = {
    val ts = java.lang.System.currentTimeMillis().toString
    log.debug(ts)
    propagateMessage(ts)
  }

} 
Example 46
Source File: AuthValidator.scala    From maha   with Apache License 2.0
package com.yahoo.maha.core.auth

import play.api.Configuration
import play.api.mvc.{RequestHeader, Result, Results}

import scala.collection.immutable.Map

case class ValidationResult(success: Boolean, user: Option[String])

trait AuthValidator {
  def init(configuration: Configuration)
  def validate(requestHeader: RequestHeader) : ValidationResult
  def handleAuthCallback(requestHeader: RequestHeader) : Result
  def handleAuthFailure(requestHeader: RequestHeader) : Result
}

class DefaultAuthValidator extends AuthValidator {
  override def init(configuration: Configuration): Unit = {
  }

  override def validate(requestHeader: RequestHeader): ValidationResult = {
    ValidationResult(success = true, user = None)
  }

  override def handleAuthCallback(requestHeader: RequestHeader): Result = {
    Results.Ok
  }

  override def handleAuthFailure(requestHeader: RequestHeader): Result = {
    Results.Ok
  }
}

trait DruidAuthHeaderProvider {
  def init(configuration: Configuration)
  def getAuthHeaders : Map[String, String]
}

class DefaultDruidAuthHeaderProvider extends DruidAuthHeaderProvider {
  override def init(configuration: Configuration): Unit = {
  }

  override def getAuthHeaders: Map[String, String] = {
    Map.empty
  }
} 
Example 47
Source File: LedgerReader.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.ledger.service

import com.daml.lf.archive.Reader
import com.daml.lf.data.Ref.{Identifier, PackageId}
import com.daml.lf.iface.reader.InterfaceReader
import com.daml.lf.iface.{DefDataType, Interface}
import com.daml.daml_lf_dev.DamlLf
import com.daml.ledger.api.v1.package_service.GetPackageResponse
import com.daml.ledger.client.services.pkg.PackageClient
import scalaz.Scalaz._
import scalaz._

import scala.collection.immutable.Map
import scala.concurrent.Future

object LedgerReader {

  type Error = String

  // PackageId -> Interface
  type PackageStore = Map[String, Interface]

  val UpToDate: Future[Error \/ Option[PackageStore]] =
    Future.successful(\/-(None))

  // FIXME Find a more suitable execution context for these helpers
  import scala.concurrent.ExecutionContext.Implicits.global

  
  def loadPackageStoreUpdates(client: PackageClient, token: Option[String])(
      loadedPackageIds: Set[String]): Future[Error \/ Option[PackageStore]] =
    for {
      newPackageIds <- client.listPackages(token).map(_.packageIds.toList)
      diffIds = newPackageIds.filterNot(loadedPackageIds): List[String] // keeping the order
      result <- if (diffIds.isEmpty) UpToDate
      else load(client, diffIds, token)
    } yield result

  private def load(
      client: PackageClient,
      packageIds: List[String],
      token: Option[String]): Future[Error \/ Some[PackageStore]] =
    packageIds
      .traverse(client.getPackage(_, token))
      .map(createPackageStoreFromArchives)
      .map(_.map(Some(_)))

  private def createPackageStoreFromArchives(
      packageResponses: List[GetPackageResponse]): Error \/ PackageStore = {
    packageResponses
      .traverseU { packageResponse: GetPackageResponse =>
        decodeInterfaceFromPackageResponse(packageResponse).map { interface =>
          (interface.packageId, interface)
        }
      }
      .map(_.toMap)
  }

  private def decodeInterfaceFromPackageResponse(
      packageResponse: GetPackageResponse): Error \/ Interface = {
    import packageResponse._
    \/.fromTryCatchNonFatal {
      val cos = Reader.damlLfCodedInputStream(archivePayload.newInput)
      val payload = DamlLf.ArchivePayload.parseFrom(cos)
      val (errors, out) =
        InterfaceReader.readInterface(PackageId.assertFromString(hash) -> payload)
      if (!errors.empty) \/.left("Errors reading LF archive:\n" + errors.toString)
      else \/.right(out)
    }.leftMap(_.getLocalizedMessage).join
  }

  def damlLfTypeLookup(packageStore: () => PackageStore)(id: Identifier): Option[DefDataType.FWT] =
    for {
      iface <- packageStore().get(id.packageId.toString)
      ifaceType <- iface.typeDecls.get(id.qualifiedName)
    } yield ifaceType.`type`
} 
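A polling sketch, reusing the example's imports; the PackageClient value is assumed to come from the surrounding ledger client setup:

val result: Future[LedgerReader.Error \/ Option[LedgerReader.PackageStore]] =
  LedgerReader.loadPackageStoreUpdates(packageClient, token = None)(loadedPackageIds = Set.empty)
// \/-(None) means the store is up to date; \/-(Some(delta)) carries only the newly loaded packages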
Example 48
Source File: FrontEnd.scala    From threejs-facade   with Mozilla Public License 2.0
package org.denigma.preview

import org.denigma.binding.binders.GeneralBinder
import org.denigma.binding.extensions.sq
import org.denigma.binding.views.BindableView
import org.querki.jquery._
import org.scalajs.dom
import org.scalajs.dom.raw.HTMLElement
import org.denigma.binding.extensions._

import scala.collection.immutable.Map
import scala.scalajs.js
import scala.scalajs.js.annotation.JSExport
import scala.util.Try


@JSExport("FrontEnd")
object FrontEnd extends BindableView with scalajs.js.JSApp
{

  lazy val elem: HTMLElement = dom.document.body

  val sidebarParams = js.Dynamic.literal(
    exclusive = false,
    dimPage = false,
    closable = false,
    useLegacy = true
  )


  override lazy val injector = defaultInjector.register("sidebar")((el, params) => new SidebarView(el).withBinder(new GeneralBinder(_)))


  @JSExport
  def main(): Unit = {
    this.bindView()
    Example.activate() //activate examples
  }

  @JSExport
  def showLeftSidebar() = {
    $(".left.sidebar").dyn.sidebar(sidebarParams).sidebar("show")
  }

  @JSExport
  def load(content: String, into: String): Unit = {
    dom.document.getElementById(into).innerHTML = content
  }

  @JSExport
  def moveInto(from: String, into: String): Unit = {
    for {
      ins <- sq.byId(from)
      intoElement <- sq.byId(into)
    } {
      this.loadElementInto(intoElement, ins.innerHTML)
      ins.parentNode.removeChild(ins)
    }
  }

  withBinder(new GeneralBinder(_))

} 
Example 49
Source File: MapOfIntsToBooleansReading.scala    From jsoniter-scala   with MIT License
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8

import com.avsystem.commons.serialization.json._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.DslPlatformJson._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JacksonSerDesers._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.JsoniterScalaCodecs._
import com.github.plokhotnyuk.jsoniter_scala.benchmark.PlayJsonFormats._
//import com.github.plokhotnyuk.jsoniter_scala.benchmark.SprayFormats._
import com.github.plokhotnyuk.jsoniter_scala.core._
import io.circe.parser._
import org.openjdk.jmh.annotations.Benchmark
import play.api.libs.json.Json
//import upickle.default._
//import spray.json._

import scala.collection.immutable.Map

class MapOfIntsToBooleansReading extends MapOfIntsToBooleansBenchmark {
  @Benchmark
  def avSystemGenCodec(): Map[Int, Boolean] = JsonStringInput.read[Map[Int, Boolean]](new String(jsonBytes, UTF_8))

  @Benchmark
  def circe(): Map[Int, Boolean] = decode[Map[Int, Boolean]](new String(jsonBytes, UTF_8)).fold(throw _, identity)

  @Benchmark
  def dslJsonScala(): Map[Int, Boolean] = dslJsonDecode[Map[Int, Boolean]](jsonBytes)

  @Benchmark
  def jacksonScala(): Map[Int, Boolean] = jacksonMapper.readValue[Map[Int, Boolean]](jsonBytes)

  @Benchmark
  def jsoniterScala(): Map[Int, Boolean] = readFromArray[Map[Int, Boolean]](jsonBytes)

  @Benchmark
  def playJson(): Map[Int, Boolean] = Json.parse(jsonBytes).as[Map[Int, Boolean]]

} 
Example 50
Source File: MapOfIntsToBooleansBenchmark.scala    From jsoniter-scala   with MIT License
package com.github.plokhotnyuk.jsoniter_scala.benchmark

import java.nio.charset.StandardCharsets.UTF_8

import org.openjdk.jmh.annotations.{Param, Setup}

import scala.collection.immutable.Map

abstract class MapOfIntsToBooleansBenchmark extends CommonParams {
  @Param(Array("1", "10", "100", "1000", "10000", "100000", "1000000"))
  var size: Int = 1000
  var obj: Map[Int, Boolean] = _
  var jsonString: String = _
  var jsonBytes: Array[Byte] = _
  var preallocatedBuf: Array[Byte] = _

  @Setup
  def setup(): Unit = {
    obj = Map((1 to size).map { i =>
      (((i * 1498724053) / Math.pow(10, i % 10)).toInt, ((i * 1498724053) & 0x1) == 0)
    }:_*)
    jsonString = obj.map(e => "\"" + e._1 + "\":" + e._2).mkString("{", ",", "}")
    jsonBytes = jsonString.getBytes(UTF_8)
    preallocatedBuf = new Array[Byte](jsonBytes.length + 100)
  }
} 
Example 51
Source File: SyntaxSpec.scala    From pureconfig   with Mozilla Public License 2.0
package pureconfig.syntax

import scala.collection.immutable.{ List, Map }

import com.typesafe.config.ConfigFactory
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import pureconfig.error.ConfigReaderException
import pureconfig.generic.auto._

class SyntaxSpec extends AnyFlatSpec with Matchers {

  behavior of "pureconfig.syntax._"

  it should "be able to serialize a ConfigValue from a type with ConfigConvert using the toConfig method" in {
    Map("a" -> 1, "b" -> 2).toConfig shouldBe ConfigFactory.parseString("""{ "a": 1, "b": 2 }""").root()
  }

  it should "be able to load a ConfigValue to a type with ConfigConvert using the to method" in {
    val conf = ConfigFactory.parseString("""{ "a": [1, 2, 3, 4], "b": { "k1": "v1", "k2": "v2" } }""")
    conf.getList("a").to[List[Int]] shouldBe Right(List(1, 2, 3, 4))
    conf.getObject("b").to[Map[String, String]] shouldBe Right(Map("k1" -> "v1", "k2" -> "v2"))
  }

  it should "be able to load a Config to a type with ConfigConvert using the to method" in {
    val conf = ConfigFactory.parseString("""{ "a": [1, 2, 3, 4], "b": { "k1": "v1", "k2": "v2" } }""")
    case class Conf(a: List[Int], b: Map[String, String])
    conf.to[Conf] shouldBe Right(Conf(List(1, 2, 3, 4), Map("k1" -> "v1", "k2" -> "v2")))
  }

  // TODO this shouldn't be here
  it should "fail when trying to convert to basic types from an empty string" in {
    val conf = ConfigFactory.parseString("""{ v: "" }""")
    conf.getValue("v").to[Boolean].isLeft shouldBe true
    conf.getValue("v").to[Double].isLeft shouldBe true
    conf.getValue("v").to[Float].isLeft shouldBe true
    conf.getValue("v").to[Int].isLeft shouldBe true
    conf.getValue("v").to[Long].isLeft shouldBe true
    conf.getValue("v").to[Short].isLeft shouldBe true
  }

  it should "fail with Exception when trying to convert to basic types from an empty string" in {
    val conf = ConfigFactory.parseString("""{ v: "" }""")

    a[ConfigReaderException[_]] should be thrownBy conf.getValue("v").toOrThrow[Boolean]
    a[ConfigReaderException[_]] should be thrownBy conf.getValue("v").toOrThrow[Double]
    a[ConfigReaderException[_]] should be thrownBy conf.getValue("v").toOrThrow[Float]
    a[ConfigReaderException[_]] should be thrownBy conf.getValue("v").toOrThrow[Int]
    a[ConfigReaderException[_]] should be thrownBy conf.getValue("v").toOrThrow[Long]
    a[ConfigReaderException[_]] should be thrownBy conf.getValue("v").toOrThrow[Short]
  }

  it should "pass when trying to convert to basic types with pureconfig.syntax toOrThrow" in {
    val conf = ConfigFactory.parseString("""{ b: true, d: 2.2, f: 3.3, i: 2, l: 2, s: 2, cs: "Cheese"}""")

    conf.getValue("b").toOrThrow[Boolean] shouldBe true
    conf.getValue("d").toOrThrow[Double] shouldBe 2.2
    conf.getValue("f").toOrThrow[Float] shouldBe 3.3f
    conf.getValue("i").toOrThrow[Int] shouldBe 2
    conf.getValue("l").toOrThrow[Long] shouldBe 2L
    conf.getValue("s").toOrThrow[Short] shouldBe 2.toShort
    conf.getValue("cs").toOrThrow[String] shouldBe "Cheese"

  }
} 
Example 52
Source File: ConcordanceParser.scala    From CSYE7200_Old   with MIT License
package edu.neu.coe.csye7200.concordance

import scala.util.parsing.combinator._
import scala.util.parsing.input.Positional
import scala.io.Source
import scala.collection.immutable.Map


class ConcordanceParser extends RegexParsers {
  private val rWord = """[\w’]+[,;\.\-\?\!\—]?""".r
  def word: Parser[PositionalString] = positioned(regex(rWord) ^^ {w => PositionalString(w)})
  def sentence: Parser[Seq[PositionalString]] = rep(word)
}

case class PositionalString(s: String) extends Positional

object ConcordanceParser {
 
  def main(args: Array[String]): Unit = {
    val docs = for (f <- args) yield Source.fromFile(f).mkString
    val concordance = for (i <- docs.indices) yield (args(i),parseDoc(docs(i)))
    println(concordance)
    // an alternative way of looking at the data (gives doc, page, line and char numbers with each string)
    val q = for {(d,xxxx) <- concordance; (p,xxx) <- xxxx; (l,xx) <- xxx; (_,c,x) <- xx} yield (d, p,l,c,x)
    println(q)
    // yet another way to look at the data
    val concordanceMap = concordance.toMap
    println(concordanceMap)
  }
  
  private def parseDoc(content: String) = {
    val pages = for (p <- content.split("/p")) yield p
    for (i <- pages.indices) yield (i+1,parsePage(pages(i)))
  }

  private def parsePage(content: String) = {
    val lines = for (l <- content.split("\n")) yield l
    for (i <- lines.indices) yield (i+1,parseLine(lines(i)))
  }

  def parseLine(line: String): Seq[(Int,Int,String)] = {
    def tidy(s: String) = s.replaceAll("""[,;\.\-\?\!\—]""", "")
    val p = new ConcordanceParser
    val r = p.parseAll(p.sentence,line) match {
      case p.Success(ws,_) => ws
      case p.Failure(e,_) => println(e); List()
      case _ => println("PositionalParser: logic error"); List()
    }
    r map {case p @ PositionalString(s) => (p.pos.line,p.pos.column,tidy(s).toLowerCase)}
  }
} 
Example 53
Source File: MapArrayParam.scala    From mmlspark   with MIT License
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package org.apache.spark.ml.param

import spray.json.{DefaultJsonProtocol, _}
import scala.collection.JavaConverters._
import scala.collection.immutable.Map
import scala.collection.mutable

object MapArrayJsonProtocol extends DefaultJsonProtocol {

  implicit object MapJsonFormat extends JsonFormat[Map[String, Seq[String]]] {
    def write(m: Map[String, Seq[String]]): JsValue = {
      JsObject(m.mapValues {
        case v: Seq[String] => seqFormat[String].write(v)
        case default => serializationError(s"Unable to serialize $default")
      })
    }

    def read(value: JsValue): Map[String, Seq[String]] = value.asInstanceOf[JsObject].fields.map(kvp => {
      val convValue = kvp._2 match {
        case v: JsValue => seqFormat[String].read(v)
        case default => deserializationError(s"Unable to deserialize $default")
      }
      (kvp._1, convValue)
    })
  }

}


// The enclosing param class declaration was elided by the examples site; a
// minimal reconstruction (constructor shape assumed) consistent with the
// methods below:
class MapArrayParam(parent: String, name: String, doc: String)
    extends Param[Map[String, Seq[String]]](parent, name, doc) {

  import MapArrayJsonProtocol._

    def w(value: java.util.HashMap[String, java.util.List[String]]): ParamPair[Map[String, Seq[String]]] = {
      val mutMap = mutable.Map[String, Seq[String]]()
      for (key <- value.keySet().asScala) {
        val list = value.get(key).asScala
        mutMap(key) = list
      }
      w(mutMap.toMap)
    }

    override def jsonEncode(value: Map[String, Seq[String]]): String = {
      val convertedMap = value.map(kvp => (kvp._1, kvp._2.toArray))
      val json = convertedMap.toJson
      json.prettyPrint
    }

    override def jsonDecode(json: String): Map[String, Seq[String]] = {
      val jsonValue = json.parseJson
      jsonValue.convertTo[Map[String, Seq[String]]]
    }

  } 
Example 54
Source File: ArrayMapParam.scala    From mmlspark   with MIT License
// Copyright (C) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in project root for information.

package org.apache.spark.ml.param

import org.apache.spark.ml.util.Identifiable
import spray.json.{DefaultJsonProtocol, _}

import scala.collection.immutable.Map

object ArrayMapJsonProtocol extends DefaultJsonProtocol {

  implicit object MapJsonFormat extends JsonFormat[Map[String, Any]] {
    def write(m: Map[String, Any]): JsValue = {
      JsObject(m.mapValues {
        case v: Int => JsNumber(v)
        case v: Double => JsNumber(v)
        case v: String => JsString(v)
        case true => JsTrue
        case false => JsFalse
        case v: Map[_, _] => write(v.asInstanceOf[Map[String, Any]])
        case default => serializationError(s"Unable to serialize $default")
      })
    }

    def read(value: JsValue): Map[String, Any] = value.asInstanceOf[JsObject].fields.map(kvp => {
      val convValue = kvp._2 match {
        case JsNumber(n) => if (n.isValidInt) n.intValue().asInstanceOf[Any] else n.toDouble.asInstanceOf[Any]
        case JsString(s) => s
        case JsTrue => true
        case JsFalse => false
        case v: JsValue => read(v)
        case default => deserializationError(s"Unable to deserialize $default")
      }
      (kvp._1, convValue)
    })
  }

}


// As above, the enclosing param class declaration was elided; a minimal
// reconstruction (constructor shape assumed) consistent with the overrides
// below:
class ArrayMapParam(parent: String, name: String, doc: String)
    extends Param[Array[Map[String, Any]]](parent, name, doc) {

  import ArrayMapJsonProtocol._

    override def w(value: Array[Map[String, Any]]): ParamPair[Array[Map[String, Any]]] = super.w(value)

    override def jsonEncode(value: Array[Map[String, Any]]): String = {
      val json = value.toSeq.toJson
      json.prettyPrint
    }

    override def jsonDecode(json: String): Array[Map[String, Any]] = {
      val jsonValue = json.parseJson
      jsonValue.convertTo[Seq[Map[String, Any]]].toArray
    }

  } 
Example 55
Source File: AccountRepositoryInMemory.scala    From frdomain-extras   with Apache License 2.0 5 votes vote down vote up
package frdomain.ch6
package domain
package repository
package interpreter

import java.time.LocalDateTime
import scala.collection.immutable.Map 

import cats._
import cats.data._
import cats.implicits._
import cats.instances.all._
import cats.effect.concurrent.Ref
import cats.effect.Sync

import common._
import model.account._

// The constructor is private so the Ref cannot leak out of the interpreter;
// access is through the smart constructor below.
final class AccountRepositoryInMemory[M[_]: Monad] private (repo: Ref[M, Map[AccountNo, Account]])
  extends AccountRepository[M] {

  def query(no: AccountNo): M[Option[Account]] = repo.get.map(_.get(no))  

  def store(a: Account): M[Account] = repo.update(_ + ((a.no, a))).map(_ => a)

  def query(openedOn: LocalDateTime): M[List[Account]] = 
    repo.get.map(_.values.filter(_.dateOfOpen.getOrElse(today) == openedOn).toList)

  def all: M[List[Account]] = repo.get.map(_.values.toList)

  def balance(no: AccountNo): M[Option[Balance]] = query(no).map(_.map(_.balance))
}

// Smart constructor 
object AccountRepositoryInMemory {
  def make[M[_]: Sync]: M[AccountRepositoryInMemory[M]] =
    Ref.of[M, Map[AccountNo, Account]](Map.empty).map(new AccountRepositoryInMemory(_))
} 
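A usage sketch, assuming cats-effect 2 (where cats.effect.concurrent.Ref lives) and some existing value account: Account from model.account:

import cats.effect.IO

val program: IO[Option[Account]] = for {
  repo  <- AccountRepositoryInMemory.make[IO]
  _     <- repo.store(account)    // account: Account is assumed to exist
  found <- repo.query(account.no)
} yield found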
Example 56
Source File: GraphMatchingTestSupport.scala    From morpheus   with Apache License 2.0 5 votes vote down vote up
package org.opencypher.morpheus.testing.support

import org.opencypher.morpheus.impl.table.SparkTable.DataFrameTable
import org.opencypher.morpheus.testing.fixture.{MorpheusSessionFixture, SparkSessionFixture}
import org.opencypher.okapi.relational.api.graph.RelationalCypherGraph
import org.opencypher.okapi.relational.api.table.RelationalCypherRecords
import org.opencypher.okapi.testing.BaseTestSuite
import org.scalatest.Assertion

import scala.collection.immutable.Map

trait GraphMatchingTestSupport {

  self: BaseTestSuite with SparkSessionFixture with MorpheusSessionFixture =>

  private def getElementIds(records: RelationalCypherRecords[DataFrameTable]): Set[List[Byte]] = {
    val elementVar = records.header.vars.toSeq match {
      case Seq(v) => v
      case other => throw new UnsupportedOperationException(s"Expected records with 1 element, got $other")
    }

    records.table.df.select(records.header.column(elementVar)).collect().map(_.getAs[Array[Byte]](0).toList).toSet
  }

  private def verify(actual: RelationalCypherGraph[DataFrameTable], expected: RelationalCypherGraph[DataFrameTable]): Assertion = {
    val expectedNodeIds = getElementIds(expected.nodes("n"))
    val expectedRelIds = getElementIds(expected.relationships("r"))

    val actualNodeIds = getElementIds(actual.nodes("n"))
    val actualRelIds = getElementIds(actual.relationships("r"))

    expectedNodeIds should equal(actualNodeIds)
    expectedRelIds should equal(actualRelIds)
  }

  implicit class GraphsMatcher(graphs: Map[String, RelationalCypherGraph[DataFrameTable]]) {
    def shouldMatch(expectedGraphs: RelationalCypherGraph[DataFrameTable]*): Unit = {
      withClue("expected and actual must have same size") {
        graphs.size should equal(expectedGraphs.size)
      }

      graphs.values.zip(expectedGraphs).foreach {
        case (actual, expected) => verify(actual, expected)
      }
    }
  }

  implicit class GraphMatcher(graph: RelationalCypherGraph[DataFrameTable]) {
    def shouldMatch(expectedGraph: RelationalCypherGraph[DataFrameTable]): Unit = verify(graph, expectedGraph)
  }
} 
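In a test that mixes in the fixtures above, the two matchers read as follows (actualGraph and expectedGraph are hypothetical RelationalCypherGraph[DataFrameTable] values):

Map("result" -> actualGraph).shouldMatch(expectedGraph)  // GraphsMatcher: keyed-map form
actualGraph.shouldMatch(expectedGraph)                   // GraphMatcher: single-graph form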
Example 57
Source File: Replacements.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.norm

import io.getquill.ast.Ast
import scala.collection.immutable.Map


// Replacements wraps a map of AST-to-AST substitutions; keys are compared via neutralize.
case class Replacements(map: Map[Ast, Ast]) {

  def contains(key: Ast): Boolean =
    map.keys.toList.map(_.neutralize).contains(key.neutralize)

  def ++(otherMap: collection.Map[Ast, Ast]): Replacements =
    Replacements(map ++ otherMap)

  def -(key: Ast): Replacements = {
    val newMap = map.toList.filterNot { case (k, v) => k.neutralize == key.neutralize }.toMap
    Replacements(newMap)
  }
}

object Replacements {
  def empty: Replacements =
    Replacements(Map())
} 
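A sketch of the intended semantics, assuming quill's Ident AST nodes (construction details vary across quill versions):

import io.getquill.ast.Ident

val repl = Replacements(Map(Ident("a") -> Ident("b")))
repl.contains(Ident("a"))                 // true: keys are compared via neutralize
(repl - Ident("a")).contains(Ident("a"))  // false after removal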
Example 58
Source File: ConfigUtils.scala    From aerosolve   with Apache License 2.0 5 votes vote down vote up
package com.airbnb.common.ml.util

import collection.JavaConverters._
import scala.collection.immutable.Map
import scala.util.Try

import com.typesafe.config.Config



object ConfigUtils {

  private def configToMapHelper(config: Config): Map[String, Any] = {
    config.entrySet().asScala.map(
      entry => {
        val value = entry.getValue.unwrapped()
        val obj = value match {
          case l: java.util.List[_] => l.asScala
          case _ => value
        }

        (entry.getKey, obj)
      }
    ).toMap
  }

  def getMapStringList(config: Config, path: String, delimiter: String): Map[String, String] = {
    config
      .getStringList(path)
      .asScala
      .map(line => line.split(delimiter))
      .filter(_.length == 2)
      .map(kv => kv(0) -> kv(1))
      .toMap
  }

} 
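getMapStringList can be exercised directly with an in-memory config (assuming the enclosing object is named ConfigUtils, per the file name); entries that do not split into exactly two parts are silently dropped:

import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString("""pairs = ["a:1", "b:2", "malformed"]""")
ConfigUtils.getMapStringList(config, "pairs", ":")  // Map("a" -> "1", "b" -> "2")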
Example 59
Source File: ConfiguredAsObjectEncoder.scala    From circe-generic-extras   with Apache License 2.0 5 votes vote down vote up
package io.circe.generic.extras.encoding

import io.circe.JsonObject
import io.circe.generic.encoding.DerivedAsObjectEncoder
import io.circe.generic.extras.{ Configuration, JsonKey }
import java.util.concurrent.ConcurrentHashMap
import scala.annotation.implicitNotFound
import scala.collection.immutable.Map
import shapeless.{ Annotations, Coproduct, HList, LabelledGeneric, Lazy }
import shapeless.ops.hlist.ToTraversable
import shapeless.ops.record.Keys

@implicitNotFound(
  """Could not find ConfiguredAsObjectEncoder for type ${A}.
Some possible causes for this:
- ${A} isn't a case class or sealed trait
- some of ${A}'s members don't have codecs of their own
- missing implicit Configuration"""
)
abstract class ConfiguredAsObjectEncoder[A](config: Configuration) extends DerivedAsObjectEncoder[A] {
  private[this] val constructorNameCache: ConcurrentHashMap[String, String] =
    new ConcurrentHashMap[String, String]()

  protected[this] def constructorNameTransformer(value: String): String = {
    val current = constructorNameCache.get(value)

    if (current eq null) {
      val transformed = config.transformConstructorNames(value)
      constructorNameCache.put(value, transformed)
      transformed
    } else {
      current
    }
  }
}

object ConfiguredAsObjectEncoder {
  implicit def encodeCaseClass[A, R <: HList, F <: HList, K <: HList](implicit
    gen: LabelledGeneric.Aux[A, R],
    encode: Lazy[ReprAsObjectEncoder[R]],
    config: Configuration,
    fields: Keys.Aux[R, F],
    fieldsToList: ToTraversable.Aux[F, List, Symbol],
    keys: Annotations.Aux[JsonKey, A, K],
    keysToList: ToTraversable.Aux[K, List, Option[JsonKey]]
  ): ConfiguredAsObjectEncoder[A] = new ConfiguredAsObjectEncoder[A](config) {
    private[this] val keyAnnotations: List[Option[JsonKey]] = keysToList(keys())
    private[this] val hasKeyAnnotations: Boolean = keyAnnotations.exists(_.nonEmpty)

    private[this] val keyAnnotationMap: Map[String, String] =
      fieldsToList(fields())
        .map(_.name)
        .zip(keyAnnotations)
        .collect {
          case (field, Some(keyAnnotation)) => (field, keyAnnotation.value)
        }
        .toMap

    private[this] def memberNameTransformer(value: String): String =
      if (hasKeyAnnotations)
        keyAnnotationMap.getOrElse(value, config.transformMemberNames(value))
      else
        config.transformMemberNames(value)

    private[this] val transformedMemberCache: Map[String, String] = {
      fieldsToList(fields()).map(f => (f.name, memberNameTransformer(f.name))).toMap
    }

    private[this] def transformMemberName(value: String) =
      transformedMemberCache.getOrElse(value, value)

    final def encodeObject(a: A): JsonObject =
      encode.value.configuredEncodeObject(gen.to(a))(
        transformMemberName,
        constructorNameTransformer,
        None
      )
  }

  implicit def encodeAdt[A, R <: Coproduct](implicit
    gen: LabelledGeneric.Aux[A, R],
    encode: Lazy[ReprAsObjectEncoder[R]],
    config: Configuration
  ): ConfiguredAsObjectEncoder[A] = new ConfiguredAsObjectEncoder[A](config) {
    final def encodeObject(a: A): JsonObject =
      encode.value.configuredEncodeObject(gen.to(a))(
        Predef.identity,
        constructorNameTransformer,
        config.discriminator
      )
  }
} 
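These instances back circe-generic-extras derivation; a typical entry point is semiauto derivation with a Configuration in scope. A sketch (the case class and field names are illustrative):

import io.circe.Encoder
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto.deriveConfiguredEncoder
import io.circe.syntax._

implicit val customConfig: Configuration = Configuration.default.withSnakeCaseMemberNames

case class UserProfile(userName: String)
implicit val userProfileEncoder: Encoder.AsObject[UserProfile] = deriveConfiguredEncoder[UserProfile]

UserProfile("ada").asJson.noSpaces  // {"user_name":"ada"}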
Example 60
Source File: ReprAsObjectCodec.scala    From circe-generic-extras   with Apache License 2.0 5 votes vote down vote up
package io.circe.generic.extras.codec

import cats.data.Validated
import io.circe.{ Decoder, DecodingFailure, HCursor, JsonObject }
import io.circe.generic.extras.ConfigurableDeriver
import io.circe.generic.extras.decoding.ReprDecoder
import io.circe.generic.extras.encoding.ReprAsObjectEncoder
import scala.annotation.implicitNotFound
import scala.collection.immutable.Map
import scala.language.experimental.macros
import shapeless.HNil


@implicitNotFound(
  """Could not find ReprAsObjectCodec for type ${A}.
Some possible causes for this:
- ${A} isn't a case class or sealed trait
- some of ${A}'s members don't have codecs of their own
- missing implicit Configuration"""
)
abstract class ReprAsObjectCodec[A] extends ReprDecoder[A] with ReprAsObjectEncoder[A]

object ReprAsObjectCodec {
  implicit def deriveReprAsObjectCodec[R]: ReprAsObjectCodec[R] = macro ConfigurableDeriver.deriveConfiguredCodec[R]

  val hnilReprCodec: ReprAsObjectCodec[HNil] = new ReprAsObjectCodec[HNil] {
    def configuredDecode(c: HCursor)(
      transformMemberNames: String => String,
      transformConstructorNames: String => String,
      defaults: Map[String, Any],
      discriminator: Option[String]
    ): Decoder.Result[HNil] =
      if (c.value.isObject) Right(HNil) else Left(DecodingFailure("HNil", c.history))

    def configuredDecodeAccumulating(c: HCursor)(
      transformMemberNames: String => String,
      transformConstructorNames: String => String,
      defaults: Map[String, Any],
      discriminator: Option[String]
    ): Decoder.AccumulatingResult[HNil] =
      if (c.value.isObject) Validated.valid(HNil) else Validated.invalidNel(DecodingFailure("HNil", c.history))

    def configuredEncodeObject(a: HNil)(
      transformMemberNames: String => String,
      transformDiscriminator: String => String,
      discriminator: Option[String]
    ): JsonObject = JsonObject.empty
  }
} 
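The HNil codec is the base case of the HList induction: it accepts any JSON object and encodes to the empty object. For instance:

import io.circe.Json
import shapeless.HNil

ReprAsObjectCodec.hnilReprCodec.configuredDecode(Json.obj().hcursor)(
  identity, identity, Map.empty, None
)  // Right(HNil)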
Example 61
Source File: RecordToMap.scala    From circe-generic-extras   with Apache License 2.0 5 votes vote down vote up
package io.circe.generic.extras.util

import scala.collection.immutable.Map
import shapeless.{ ::, HList, HNil, Witness }
import shapeless.labelled.FieldType

abstract class RecordToMap[R <: HList] {
  def apply(r: R): Map[String, Any]
}

object RecordToMap {
  implicit val hnilRecordToMap: RecordToMap[HNil] = new RecordToMap[HNil] {
    def apply(r: HNil): Map[String, Any] = Map.empty
  }

  implicit def hconsRecordToMap[K <: Symbol, V, T <: HList](implicit
    wit: Witness.Aux[K],
    rtmT: RecordToMap[T]
  ): RecordToMap[FieldType[K, V] :: T] = new RecordToMap[FieldType[K, V] :: T] {
    def apply(r: FieldType[K, V] :: T): Map[String, Any] = rtmT(r.tail) + ((wit.value.name, r.head))
  }
}
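A sketch of summoning RecordToMap for a case class via LabelledGeneric (Person is illustrative):

import shapeless.LabelledGeneric

case class Person(name: String, age: Int)

val gen = LabelledGeneric[Person]
val rtm = implicitly[RecordToMap[gen.Repr]]
rtm(gen.to(Person("Ada", 36)))  // Map("name" -> "Ada", "age" -> 36), entry order unspecified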