java.util.Date Scala Examples

The following examples show how to use java.util.Date in Scala. Each example is taken from an open-source project; the source file, project name, and license are listed above the code.
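Before the examples, here is a minimal, self-contained sketch of the java.util.Date operations that recur throughout them: getting the current instant, building a Date from epoch milliseconds, comparing two instants, and formatting with SimpleDateFormat. The object name DateBasics is ours, for illustration only.

import java.text.SimpleDateFormat
import java.util.Date

object DateBasics {
  def main(args: Array[String]): Unit = {
    val now = new Date()                        // current instant
    val epochMs: Long = now.getTime             // milliseconds since 1970-01-01T00:00:00Z
    val sameInstant = new Date(epochMs)         // rebuild a Date from epoch milliseconds

    println(now.compareTo(sameInstant) == 0)    // true: same millisecond
    println(now.before(new Date(epochMs + 1)))  // true: strictly earlier

    // SimpleDateFormat is not thread-safe; create one per use (or per thread)
    val fmt = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    println(fmt.format(now))
  }
}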
Example 1
Source File: NaptimeModuleTest.scala    From naptime   with Apache License 2.0
package org.coursera.naptime

import java.util.Date
import javax.inject.Inject

import akka.stream.Materializer
import com.google.inject.Guice
import com.google.inject.Stage
import com.linkedin.data.schema.DataSchema
import com.linkedin.data.schema.DataSchemaUtil
import com.linkedin.data.schema.PrimitiveDataSchema
import com.linkedin.data.schema.RecordDataSchema
import org.coursera.naptime.model.KeyFormat
import org.coursera.naptime.resources.TopLevelCollectionResource
import org.coursera.naptime.router2.NaptimeRoutes
import org.junit.Test
import org.mockito.Mockito.mock
import org.scalatest.junit.AssertionsForJUnit
import play.api.libs.json.Json
import play.api.libs.json.OFormat

import scala.concurrent.ExecutionContext

object NaptimeModuleTest {
  case class User(name: String, createdAt: Date)
  object User {
    implicit val oFormat: OFormat[User] = Json.format[User]
  }
  class MyResource(implicit val executionContext: ExecutionContext, val materializer: Materializer)
      extends TopLevelCollectionResource[String, User] {
    override implicit def resourceFormat: OFormat[User] = User.oFormat
    override def keyFormat: KeyFormat[KeyType] = KeyFormat.stringKeyFormat
    override def resourceName: String = "myResource"
    implicit val fields = Fields

    def get(id: String) = Nap.get(ctx => ???)
  }
  object MyFakeModule extends NaptimeModule {
    override def configure(): Unit = {
      bindResource[MyResource]
      bind[MyResource].toInstance(mock(classOf[MyResource]))
      bindSchemaType[Date](DataSchemaUtil.dataSchemaTypeToPrimitiveDataSchema(DataSchema.Type.LONG))
    }
  }

  class OverrideTypesHelper @Inject()(val schemaOverrideTypes: NaptimeModule.SchemaTypeOverrides)
}

class NaptimeModuleTest extends AssertionsForJUnit {
  import NaptimeModuleTest._

  
  @Test
  def checkInferredOverrides(): Unit = {
    val injector = Guice.createInjector(Stage.DEVELOPMENT, MyFakeModule, NaptimeModule)
    val overrides = injector.getInstance(classOf[OverrideTypesHelper])
    assert(overrides.schemaOverrideTypes.size === 1)
    assert(overrides.schemaOverrideTypes.contains("java.util.Date"))
  }

  @Test
  def checkComputedOverrides(): Unit = {
    val injector = Guice.createInjector(Stage.DEVELOPMENT, MyFakeModule, NaptimeModule)
    val overrides = injector.getInstance(classOf[OverrideTypesHelper])
    val routes = injector.getInstance(classOf[NaptimeRoutes])
    assert(1 === routes.routerBuilders.size)
    val routerBuilder = routes.routerBuilders.head
    val inferredSchemaKeyed =
      routerBuilder.types.find(_.key == "org.coursera.naptime.NaptimeModuleTest.User").get
    assert(inferredSchemaKeyed.value.isInstanceOf[RecordDataSchema])
    val userSchema = inferredSchemaKeyed.value.asInstanceOf[RecordDataSchema]
    assert(2 === userSchema.getFields.size())
    val initialCreatedAtSchema = userSchema.getField("createdAt").getType.getDereferencedDataSchema
    assert(initialCreatedAtSchema.isInstanceOf[RecordDataSchema])
    assert(
      initialCreatedAtSchema
        .asInstanceOf[RecordDataSchema]
        .getDoc
        .contains("Unable to infer schema"))
    SchemaUtils.fixupInferredSchemas(userSchema, overrides.schemaOverrideTypes)
    val fixedCreatedAtSchema = userSchema.getField("createdAt").getType.getDereferencedDataSchema
    assert(fixedCreatedAtSchema.isInstanceOf[PrimitiveDataSchema])
  }
} 
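Example 1 overrides the inferred schema for java.util.Date so that it is carried as a LONG, i.e. epoch milliseconds. The same representation can be chosen on the Play JSON side; the sketch below is a hypothetical, self-contained Format[Date] that reads and writes epoch milliseconds, assuming play-json is on the classpath. It is not part of the Naptime code above, and dateAsMillisFormat is our own name.

import java.util.Date
import play.api.libs.json._

object DateAsMillisJson {
  // Read a Date from a JSON number of epoch milliseconds and write it back the same way
  val dateAsMillisFormat: Format[Date] = Format(
    Reads.of[Long].map(ms => new Date(ms)),
    Writes(d => JsNumber(BigDecimal(d.getTime)))
  )

  def main(args: Array[String]): Unit = {
    val json = Json.toJson(new Date(0L))(dateAsMillisFormat)
    println(json)                               // 0
    println(json.as[Date](dateAsMillisFormat))  // Thu Jan 01 ... 1970
  }
}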
Example 2
Source File: package.scala    From gbf-raidfinder   with MIT License
package walfie.gbf.raidfinder

import java.util.Date

package object domain {
  type BossName = String
  type TweetId = Long
  type RaidImage = String
}

package domain {

  sealed trait Language
  object Language {
    case object English extends Language
    case object Japanese extends Language
  }

  case class RaidInfo(
    tweet: RaidTweet,
    boss:  RaidBoss
  )

  case class RaidTweet(
    bossName:     BossName,
    raidId:       String,
    screenName:   String,
    tweetId:      TweetId,
    profileImage: String,
    text:         String,
    createdAt:    Date,
    language:     Language
  )

  case class RaidBoss(
    name:     BossName,
    level:    Int,
    image:    Option[RaidImage],
    lastSeen: Date,
    language: Language
  )

  trait FromRaidTweet[T] {
    def from(raidTweet: RaidTweet): T
  }

  object FromRaidTweet {
    def apply[T](fromF: RaidTweet => T) = new FromRaidTweet[T] {
      def from(raidTweet: RaidTweet): T = fromF(raidTweet)
    }

    val Identity: FromRaidTweet[RaidTweet] =
      FromRaidTweet[RaidTweet](identity)
  }
} 
Example 3
Source File: RaidFinder.scala    From gbf-raidfinder   with MIT License
package walfie.gbf.raidfinder

import java.util.Date
import monix.eval.Task
import monix.execution.{Cancelable, Scheduler}
import monix.reactive._
import scala.concurrent.duration._
import scala.concurrent.Future
import twitter4j._
import walfie.gbf.raidfinder.domain._
import walfie.gbf.raidfinder.util.CachedObservablesPartitioner

trait RaidFinder[T] {
  def getRaidTweets(bossName: BossName): Observable[T]
  def newBossObservable: Observable[RaidBoss]
  def getKnownBosses(): Map[BossName, RaidBoss]
  def purgeOldBosses(
    minDate:        Date,
    levelThreshold: Option[Int]
  ): Future[Map[BossName, RaidBoss]]

  def shutdown(): Unit
}

object RaidFinder {
  val DefaultCacheSizePerBoss = 20
  val DefaultBackfillSize = 200

  
  protected def onShutdown(): Unit = ()

  // TODO: Parsing happens twice somewhere -- should figure out where
  private val raidInfos = statusesObservable
    .collect(Function.unlift(StatusParser.parse))
    .publish

  private val (partitioner, partitionerCancelable) =
    CachedObservablesPartitioner.fromUngroupedObservable(
      raidInfos.map(_.tweet),
      cachedTweetsPerBoss,
      (_: RaidTweet).bossName,
      fromRaidTweet.from // TODO
    )

  private val (knownBosses, knownBossesCancelable) = KnownBossesObserver
    .fromRaidInfoObservable(raidInfos, initialBosses)

  val newBossObservable = knownBosses.newBossObservable

  private val raidInfosCancelable = raidInfos.connect()

  private val cancelable = Cancelable { () =>
    List(
      raidInfosCancelable,
      partitionerCancelable,
      knownBossesCancelable
    ).foreach(_.cancel)
    onShutdown()
  }

  def shutdown(): Unit = cancelable.cancel()
  def getKnownBosses(): Map[BossName, RaidBoss] =
    knownBosses.get()
  def getRaidTweets(bossName: BossName): Observable[T] =
    partitioner.getObservable(bossName)

  def purgeOldBosses(
    minDate:        Date,
    levelThreshold: Option[Int]
  ): Future[Map[BossName, RaidBoss]] =
    knownBosses.purgeOldBosses(minDate, levelThreshold)
} 
Example 4
Source File: time.scala    From gbf-raidfinder   with MIT License
package walfie.gbf.raidfinder.client.util

import java.util.Date
import scala.scalajs.js

package time {
  trait Clock { def now(): Date }
  object SystemClock extends Clock { def now(): Date = new Date() }

  case class Duration(milliseconds: Long) extends AnyVal
  object Duration {
    def seconds(s: Long): Duration = Duration(s * 1000)
    def minutes(m: Long): Duration = Duration(m * 60 * 1000)
    def hours(h: Long): Duration = Duration(h * 3600 * 1000)
    def days(d: Long): Duration = Duration(d * 24 * 3600 * 1000)
  }
}

package object time {
  val MomentShortLocale: js.Dictionary[js.Any] = js.Dictionary(
    "parentLocale" -> "en",
    "relativeTime" -> js.Dictionary(
      "future" -> "in %s",
      "past" -> "%s ago",
      "s" -> "now",
      "ss" -> "%ss",
      "m" -> "1m",
      "mm" -> "%dm",
      "h" -> "1h",
      "hh" -> "%dh",
      "d" -> "1d",
      "dd" -> "%dd",
      "M" -> "1M",
      "MM" -> "%dM",
      "y" -> "1Y",
      "yy" -> "%dY"
    )
  )
} 
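The Clock trait in Example 4 is a seam: code that needs "now" asks a Clock instead of calling new Date() directly, so tests can substitute a deterministic clock for SystemClock. A minimal sketch of how that seam is used, with our own FixedClock and isExpired names (mirroring the idea, not taken from the project):

import java.util.Date

object ClockSeam {
  trait Clock { def now(): Date }
  object SystemClock extends Clock { def now(): Date = new Date() }

  // Deterministic clock for tests
  final case class FixedClock(epochMs: Long) extends Clock { def now(): Date = new Date(epochMs) }

  // Business logic depends on the trait, never on new Date() directly
  def isExpired(deadline: Date, clock: Clock): Boolean =
    clock.now().after(deadline)

  def main(args: Array[String]): Unit = {
    val deadline = new Date(1000L)
    println(isExpired(deadline, FixedClock(999L)))   // false
    println(isExpired(deadline, FixedClock(1001L)))  // true
  }
}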
Example 5
Source File: PastValidatorForOption.scala    From bean-validation-scala   with MIT License
package com.tsukaby.bean_validation_scala

import java.util.{Calendar, Date}
import javax.validation.constraints.Past
import javax.validation.{ConstraintValidator, ConstraintValidatorContext}

import org.hibernate.validator.internal.constraintvalidators.bv.past.{PastValidatorForReadablePartial, PastValidatorForReadableInstant, PastValidatorForDate, PastValidatorForCalendar}
import org.joda.time.{ReadableInstant, ReadablePartial}


class PastValidatorForOption extends ConstraintValidator[Past, Option[_]] {
  private var constraintAnnotation: Past = null

  override def initialize(constraintAnnotation: Past): Unit = {
    this.constraintAnnotation = constraintAnnotation
  }

  override def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = {

    value match {
      case Some(x: Calendar) =>
        val v = new PastValidatorForCalendar
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case Some(x: Date) =>
        val v = new PastValidatorForDate
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case Some(x: ReadableInstant) =>
        val v = new PastValidatorForReadableInstant
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case Some(x: ReadablePartial) =>
        val v = new PastValidatorForReadablePartial
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case None =>
        true
      case Some(_) =>
        throw new IllegalStateException("oops.")
    }
  }
} 
Example 6
Source File: FutureValidatorForOption.scala    From bean-validation-scala   with MIT License
package com.tsukaby.bean_validation_scala

import java.util.{Calendar, Date}
import javax.validation.constraints.Future
import javax.validation.{ConstraintValidator, ConstraintValidatorContext}

import org.hibernate.validator.internal.constraintvalidators.bv.future.{FutureValidatorForReadablePartial, FutureValidatorForReadableInstant, FutureValidatorForDate, FutureValidatorForCalendar}
import org.joda.time.{ReadableInstant, ReadablePartial}


class FutureValidatorForOption extends ConstraintValidator[Future, Option[_]] {
  private var constraintAnnotation: Future = null

  override def initialize(constraintAnnotation: Future): Unit = {
    this.constraintAnnotation = constraintAnnotation
  }

  override def isValid(value: Option[_], context: ConstraintValidatorContext): Boolean = {

    value match {
      case Some(x: Calendar) =>
        val v = new FutureValidatorForCalendar
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case Some(x: Date) =>
        val v = new FutureValidatorForDate
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case Some(x: ReadableInstant) =>
        val v = new FutureValidatorForReadableInstant
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case Some(x: ReadablePartial) =>
        val v = new FutureValidatorForReadablePartial
        v.initialize(constraintAnnotation)
        v.isValid(x, context)
      case None =>
        true
      case Some(_) =>
        throw new IllegalStateException("oops.")
    }
  }
} 
Example 7
Source File: PastValidatorForOptionSpec.scala    From bean-validation-scala   with MIT License
package com.tsukaby.bean_validation_scala

import java.util.{Locale, Date, Calendar}
import javax.validation.constraints.Past

import org.joda.time.DateTime

import scala.annotation.meta.field

class PastValidatorForOptionSpec extends BaseSpec {

  private[this] case class TestBeanWithOptionCalendar(
                                                       @(Past@field)
                                                       value: Option[Calendar]
                                                       )

  private[this] case class TestBeanWithOptionDate(
                                                   @(Past@field)
                                                   value: Option[Date]
                                                   )

  private[this] case class TestBeanWithOptionDateTime(
                                                       @(Past@field)
                                                       value: Option[DateTime]
                                                       )

  val tomorrow = DateTime.now().plusDays(1)
  val yesterday = DateTime.now().minusDays(1)
  Seq(
    (TestBeanWithOptionCalendar(Some(tomorrow.toCalendar(Locale.getDefault))), 1),
    (TestBeanWithOptionCalendar(Some(yesterday.toCalendar(Locale.getDefault))), 0),
    (TestBeanWithOptionDate(Some(tomorrow.toDate)), 1),
    (TestBeanWithOptionDate(Some(yesterday.toDate)), 0),
    (TestBeanWithOptionDateTime(Some(tomorrow)), 1),
    (TestBeanWithOptionDateTime(Some(yesterday)), 0)
  ) foreach { case (bean, expected) =>
    s"Check violations count. bean = $bean, count = $expected" >> {
      test(bean, expected)
    }
  }
} 
Example 8
Source File: FutureValidatorForOptionSpec.scala    From bean-validation-scala   with MIT License
package com.tsukaby.bean_validation_scala

import java.util.{Calendar, Date, Locale}
import javax.validation.constraints.Future

import org.joda.time.DateTime

import scala.annotation.meta.field

class FutureValidatorForOptionSpec extends BaseSpec {

  private[this] case class TestBeanWithOptionCalendar(
                                                       @(Future@field)
                                                       value: Option[Calendar]
                                                       )

  private[this] case class TestBeanWithOptionDate(
                                                   @(Future@field)
                                                   value: Option[Date]
                                                   )

  private[this] case class TestBeanWithOptionDateTime(
                                                       @(Future@field)
                                                       value: Option[DateTime]
                                                       )

  val yesterday = DateTime.now().minusDays(1)
  val tomorrow = DateTime.now().plusDays(1)
  Seq(
    (TestBeanWithOptionCalendar(Some(yesterday.toCalendar(Locale.getDefault))), 1),
    (TestBeanWithOptionCalendar(Some(tomorrow.toCalendar(Locale.getDefault))), 0),
    (TestBeanWithOptionDate(Some(yesterday.toDate)), 1),
    (TestBeanWithOptionDate(Some(tomorrow.toDate)), 0),
    (TestBeanWithOptionDateTime(Some(yesterday)), 1),
    (TestBeanWithOptionDateTime(Some(tomorrow)), 0)
  ) foreach { case (bean, expected) =>
    s"Check violations count. bean = $bean, count = $expected" >> {
      test(bean, expected)
    }
  }
} 
Example 9
Source File: MesosDriverDescription.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.mesos

import java.util.Date

import org.apache.spark.SparkConf
import org.apache.spark.deploy.Command
import org.apache.spark.scheduler.cluster.mesos.MesosClusterRetryState


private[spark] class MesosDriverDescription(
    val name: String,
    val jarUrl: String,
    val mem: Int,
    val cores: Double,
    val supervise: Boolean,
    val command: Command,
    schedulerProperties: Map[String, String],
    val submissionId: String,
    val submissionDate: Date,
    val retryState: Option[MesosClusterRetryState] = None)
  extends Serializable {

  val conf = new SparkConf(false)
  schedulerProperties.foreach {case (k, v) => conf.set(k, v)}

  def copy(
      name: String = name,
      jarUrl: String = jarUrl,
      mem: Int = mem,
      cores: Double = cores,
      supervise: Boolean = supervise,
      command: Command = command,
      schedulerProperties: SparkConf = conf,
      submissionId: String = submissionId,
      submissionDate: Date = submissionDate,
      retryState: Option[MesosClusterRetryState] = retryState): MesosDriverDescription = {

    new MesosDriverDescription(name, jarUrl, mem, cores, supervise, command, conf.getAll.toMap,
      submissionId, submissionDate, retryState)
  }

  override def toString: String = s"MesosDriverDescription (${command.mainClass})"
} 
Example 10
Source File: PMMLModelExport.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
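Example 10 stamps the PMML header with new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()). The self-contained sketch below shows the same formatting pattern plus the parse round-trip; SimpleDateFormat is not thread-safe, which is one reason the example builds it locally instead of sharing a single instance.

import java.text.SimpleDateFormat
import java.util.Date

object TimestampFormatting {
  def main(args: Array[String]): Unit = {
    val fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss") // create per use: not thread-safe

    val stamp: String = fmt.format(new Date())
    println(stamp)                        // e.g. 2016-01-01T12:34:56

    val parsed: Date = fmt.parse(stamp)   // round-trip back to a Date (second precision)
    println(fmt.format(parsed) == stamp)  // true
  }
}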
Example 11
Source File: DriverInfo.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.master

import java.util.Date

import org.apache.spark.deploy.DriverDescription
import org.apache.spark.util.Utils

private[deploy] class DriverInfo(
    val startTime: Long,
    val id: String,
    val desc: DriverDescription,
    val submitDate: Date)
  extends Serializable {

  @transient var state: DriverState.Value = DriverState.SUBMITTED
  @transient var exception: Option[Exception] = None
  @transient var worker: Option[WorkerInfo] = None

  init()

  private def readObject(in: java.io.ObjectInputStream): Unit = Utils.tryOrIOException {
    in.defaultReadObject()
    init()
  }

  private def init(): Unit = {
    state = DriverState.SUBMITTED
    worker = None
    exception = None
  }
} 
Example 12
Source File: AllStagesResourceSuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.status.api.v1

import java.util.Date

import scala.collection.mutable.LinkedHashMap

import org.apache.spark.SparkFunSuite
import org.apache.spark.scheduler.{StageInfo, TaskInfo, TaskLocality}
import org.apache.spark.ui.jobs.UIData.{StageUIData, TaskUIData}

class AllStagesResourceSuite extends SparkFunSuite {

  def getFirstTaskLaunchTime(taskLaunchTimes: Seq[Long]): Option[Date] = {
    val tasks = new LinkedHashMap[Long, TaskUIData]
    taskLaunchTimes.zipWithIndex.foreach { case (time, idx) =>
      tasks(idx.toLong) = TaskUIData(
        new TaskInfo(idx, idx, 1, time, "", "", TaskLocality.ANY, false), None)
    }

    val stageUiData = new StageUIData()
    stageUiData.taskData = tasks
    val status = StageStatus.ACTIVE
    val stageInfo = new StageInfo(
      1, 1, "stage 1", 10, Seq.empty, Seq.empty, "details abc")
    val stageData = AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData, false)

    stageData.firstTaskLaunchedTime
  }

  test("firstTaskLaunchedTime when there are no tasks") {
    val result = getFirstTaskLaunchTime(Seq())
    assert(result == None)
  }

  test("firstTaskLaunchedTime when there are tasks but none launched") {
    val result = getFirstTaskLaunchTime(Seq(-100L, -200L, -300L))
    assert(result == None)
  }

  test("firstTaskLaunchedTime when there are tasks and some launched") {
    val result = getFirstTaskLaunchTime(Seq(-100L, 1449255596000L, 1449255597000L))
    assert(result == Some(new Date(1449255596000L)))
  }

} 
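The test above relies on negative launch times being treated as "not launched". Reduced to plain collections, the computation it exercises looks roughly like the sketch below; firstLaunched is our own helper, not Spark's actual implementation.

import java.util.Date

object FirstLaunch {
  // Non-positive times are sentinels for "not launched yet"
  def firstLaunched(launchTimes: Seq[Long]): Option[Date] =
    launchTimes.filter(_ > 0).reduceOption(_ min _).map(new Date(_))

  def main(args: Array[String]): Unit = {
    println(firstLaunched(Seq.empty))                                   // None
    println(firstLaunched(Seq(-100L, -200L, -300L)))                    // None
    println(firstLaunched(Seq(-100L, 1449255596000L, 1449255597000L)))  // Some(...) for the earliest positive time
  }
}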
Example 13
Source File: MasterWebUISuite.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.io.DataOutputStream
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import java.util.Date

import scala.collection.mutable.HashMap

import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.{KillDriverResponse, RequestKillDriver}
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv}


class MasterWebUISuite extends SparkFunSuite with BeforeAndAfterAll {

  val conf = new SparkConf
  val securityMgr = new SecurityManager(conf)
  val rpcEnv = mock(classOf[RpcEnv])
  val master = mock(classOf[Master])
  val masterEndpointRef = mock(classOf[RpcEndpointRef])
  when(master.securityMgr).thenReturn(securityMgr)
  when(master.conf).thenReturn(conf)
  when(master.rpcEnv).thenReturn(rpcEnv)
  when(master.self).thenReturn(masterEndpointRef)
  val masterWebUI = new MasterWebUI(master, 0)

  override def beforeAll() {
    super.beforeAll()
    masterWebUI.bind()
  }

  override def afterAll() {
    masterWebUI.stop()
    super.afterAll()
  }

  test("kill application") {
    val appDesc = createAppDesc()
    // use new start date so it isn't filtered by UI
    val activeApp = new ApplicationInfo(
      new Date().getTime, "app-0", appDesc, new Date(), null, Int.MaxValue)

    when(master.idToApp).thenReturn(HashMap[String, ApplicationInfo]((activeApp.id, activeApp)))

    val url = s"http://localhost:${masterWebUI.boundPort}/app/kill/"
    val body = convPostDataToString(Map(("id", activeApp.id), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify the master was called to remove the active app
    verify(master, times(1)).removeApplication(activeApp, ApplicationState.KILLED)
  }

  test("kill driver") {
    val activeDriverId = "driver-0"
    val url = s"http://localhost:${masterWebUI.boundPort}/driver/kill/"
    val body = convPostDataToString(Map(("id", activeDriverId), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify that master was asked to kill driver with the correct id
    verify(masterEndpointRef, times(1)).ask[KillDriverResponse](RequestKillDriver(activeDriverId))
  }

  private def convPostDataToString(data: Map[String, String]): String = {
    (for ((name, value) <- data) yield s"$name=$value").mkString("&")
  }

  
  private def sendHttpRequest(
      url: String,
      method: String,
      body: String = ""): HttpURLConnection = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod(method)
    if (body.nonEmpty) {
      conn.setDoOutput(true)
      conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")
      conn.setRequestProperty("Content-Length", Integer.toString(body.length))
      val out = new DataOutputStream(conn.getOutputStream)
      out.write(body.getBytes(StandardCharsets.UTF_8))
      out.close()
    }
    conn
  }
} 
Example 14
Source File: DeployTestUtils.scala    From drizzle-spark   with Apache License 2.0
package org.apache.spark.deploy

import java.io.File
import java.util.Date

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}

private[deploy] object DeployTestUtils {
  def createAppDesc(): ApplicationDescription = {
    val cmd = new Command("mainClass", List("arg1", "arg2"), Map(), Seq(), Seq(), Seq())
    new ApplicationDescription("name", Some(4), 1234, cmd, "appUiUrl")
  }

  def createAppInfo() : ApplicationInfo = {
    val appDesc = createAppDesc()
    val appInfo = new ApplicationInfo(JsonConstants.appInfoStartTime,
      "id", appDesc, JsonConstants.submitDate, null, Int.MaxValue)
    appInfo.endTime = JsonConstants.currTimeInMillis
    appInfo
  }

  def createDriverCommand(): Command = new Command(
    "org.apache.spark.FakeClass", Seq("some arg --and-some options -g foo"),
    Map(("K1", "V1"), ("K2", "V2")), Seq("cp1", "cp2"), Seq("lp1", "lp2"), Seq("-Dfoo")
  )

  def createDriverDesc(): DriverDescription =
    new DriverDescription("hdfs://some-dir/some.jar", 100, 3, false, createDriverCommand())

  def createDriverInfo(): DriverInfo = new DriverInfo(3, "driver-3",
    createDriverDesc(), new Date())

  def createWorkerInfo(): WorkerInfo = {
    val workerInfo = new WorkerInfo("id", "host", 8080, 4, 1234, null, "http://publicAddress:80")
    workerInfo.lastHeartbeat = JsonConstants.currTimeInMillis
    workerInfo
  }

  def createExecutorRunner(execId: Int): ExecutorRunner = {
    new ExecutorRunner(
      "appId",
      execId,
      createAppDesc(),
      4,
      1234,
      null,
      "workerId",
      "host",
      123,
      "publicAddress",
      new File("sparkHome"),
      new File("workDir"),
      "spark://worker",
      new SparkConf,
      Seq("localDir"),
      ExecutorState.RUNNING)
  }

  def createDriverRunner(driverId: String): DriverRunner = {
    val conf = new SparkConf()
    new DriverRunner(
      conf,
      driverId,
      new File("workDir"),
      new File("sparkHome"),
      createDriverDesc(),
      null,
      "spark://worker",
      new SecurityManager(conf))
  }
} 
Example 15
Source File: Train.scala    From BigDL   with Apache License 2.0
package com.intel.analytics.bigdl.models.vgg

import java.text.SimpleDateFormat
import java.util.Date

import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.dataset.DataSet
import com.intel.analytics.bigdl.dataset.image._
import com.intel.analytics.bigdl.nn.{ClassNLLCriterion, Module}
import com.intel.analytics.bigdl.optim._
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric._
import com.intel.analytics.bigdl.utils.{Engine, LoggerFilter, OptimizerV1, OptimizerV2, T, Table}
import com.intel.analytics.bigdl.visualization.{TrainSummary, ValidationSummary}
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext

object Train {
  LoggerFilter.redirectSparkInfoLogs()


  import Utils._

  def main(args: Array[String]): Unit = {
    trainParser.parse(args, new TrainParams()).map(param => {
      val conf = Engine.createSparkConf().setAppName("Train Vgg on Cifar10")
        // Will throw exception without this config when has only one executor
          .set("spark.rpc.message.maxSize", "200")
      val sc = new SparkContext(conf)
      Engine.init

      val trainDataSet = DataSet.array(Utils.loadTrain(param.folder), sc) ->
        BytesToBGRImg() -> BGRImgNormalizer(trainMean, trainStd) ->
        BGRImgToBatch(param.batchSize)

      val model = if (param.modelSnapshot.isDefined) {
        Module.load[Float](param.modelSnapshot.get)
      } else {
        if (param.graphModel) VggForCifar10.graph(classNum = 10) else VggForCifar10(classNum = 10)
      }

      if (param.optimizerVersion.isDefined) {
        param.optimizerVersion.get.toLowerCase match {
          case "optimizerv1" => Engine.setOptimizerVersion(OptimizerV1)
          case "optimizerv2" => Engine.setOptimizerVersion(OptimizerV2)
        }
      }

      val optimMethod = if (param.stateSnapshot.isDefined) {
        OptimMethod.load[Float](param.stateSnapshot.get)
      } else {
        new SGD[Float](learningRate = param.learningRate, learningRateDecay = 0.0,
          weightDecay = param.weightDecay, momentum = 0.9, dampening = 0.0, nesterov = false,
          learningRateSchedule = SGD.EpochStep(25, 0.5))
      }

      val optimizer = Optimizer(
        model = model,
        dataset = trainDataSet,
        criterion = new ClassNLLCriterion[Float]()
      )

      val validateSet = DataSet.array(Utils.loadTest(param.folder), sc) ->
        BytesToBGRImg() -> BGRImgNormalizer(testMean, testStd) ->
        BGRImgToBatch(param.batchSize)

      if (param.checkpoint.isDefined) {
        optimizer.setCheckpoint(param.checkpoint.get, Trigger.everyEpoch)
      }

      if (param.overWriteCheckpoint) {
        optimizer.overWriteCheckpoint()
      }

      if (param.summaryPath.isDefined) {
        val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
        val timeStamp = sdf.format(new Date())
        val trainSummry = new TrainSummary(param.summaryPath.get,
          s"vgg-on-cifar10-train-$timeStamp")
        optimizer.setTrainSummary(trainSummry)
        val validationSummary = new ValidationSummary(param.summaryPath.get,
          s"vgg-on-cifar10-val-$timeStamp")
        optimizer.setValidationSummary(validationSummary)
      }

      optimizer
        .setValidation(Trigger.everyEpoch, validateSet, Array(new Top1Accuracy[Float]))
        .setOptimMethod(optimMethod)
        .setEndWhen(Trigger.maxEpoch(param.maxEpoch))
        .optimize()

      sc.stop()
    })
  }
} 
Example 16
Source File: WikiETL.scala    From CarbonDataLearning   with GNU General Public License v3.0
package org.github.xubo245.carbonDataLearning.etl

import java.io.{File, PrintWriter}
import java.text.SimpleDateFormat
import java.util.Date

import scala.io.Source
import scala.util.Random

object WikiETL {
  def main(args: Array[String]): Unit = {
    val directory = "/root/xubo/data"
    val files = new File(directory)
    val out = new PrintWriter("/root/xubo/data/pageviews-20150505time")
    var flag:Int = 10000000;
    var typeMap= Map (("b","wikibooks")
      ,("d","wiktionary")
      ,("m","wikimedia")
      ,("mw","wikipedia mobile")
      ,("n","wikinews")
      ,("q","wikiquote")
      ,("s","wikisource")
      ,("v","wikiversity")
      ,("w","mediawiki"))

    for (file <- files.listFiles().sorted.filter(_.getCanonicalFile.getName.contains("pageviews-20150505-"))) {
      val filePath = file.getCanonicalPath
      println(filePath)
      //            val out = new PrintWriter(filePath + "WithTime")
      val reader = Source.fromFile(filePath)
      val fileName = file.getCanonicalFile.getName
      val delimiter = "\t"
      for (line <- reader.getLines()) {
        val stringBuffer = new StringBuffer()
        val random = new Random()
        val id = flag+random.nextInt(1000000)
        stringBuffer
          .append(id).append(delimiter)
          .append(fileName.substring(10, 14)).append(delimiter)
          .append(fileName.substring(14, 16)).append(delimiter)
          .append(fileName.substring(16, 18)).append(delimiter)
          .append(fileName.substring(19, 21)).append(delimiter)
        val array=line.mkString.split("\\s+")

        if (array.length == 4 && array(2).matches("[0-9]*") && !array(1).contains("\"")) {
          val domain = array(0).split('.')
          stringBuffer.append(domain(0)).append(delimiter)
          if (domain.length > 1) {
            var value: String = typeMap.getOrElse(domain(1), "wiki")
            stringBuffer.append(value).append(delimiter)
          } else {
            stringBuffer.append("wiki").append(delimiter)
          }
          val time = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
          val tid= id*10+random.nextInt(5)
          stringBuffer.append(array(1).replace('_',' ')).append(delimiter)
            .append(tid).append(delimiter)
            .append(array(2)).append(delimiter)
            .append(random.nextInt(100000)).append(delimiter)
            .append(time)

          //          for (i <- 0 until array.length-1){
          //            stringBuffer.append(array(i)).append(delimiter)
          //          }
          //          stringBuffer.append(array(array.length-1))

          //        if (array.length == 4 && array(2).matches("[0-9]*")) {
          //          id = id + 1
          out.println(stringBuffer.toString)
        }
      }
    }
    out.close()
  }
} 
Example 17
Source File: FlowLauncher.scala    From piflow   with BSD 2-Clause "Simplified" License
package cn.piflow.util

import java.io.File
import java.util.Date
import java.util.concurrent.CountDownLatch

import cn.piflow.Flow
import org.apache.hadoop.security.SecurityUtil
import org.apache.http.client.methods.{CloseableHttpResponse, HttpPut}
import org.apache.http.entity.StringEntity
import org.apache.http.impl.client.HttpClients
import org.apache.http.util.EntityUtils
import org.apache.spark.launcher.SparkLauncher


object FlowLauncher {

  def launch(flow: Flow) : SparkLauncher = {

    var flowJson = flow.getFlowJson()
    println("FlowLauncher json:" + flowJson)

    val flowJsonencryptAES = SecurityUtil.encryptAES(flowJson)

    var appId : String = ""
    val countDownLatch = new CountDownLatch(1)
    val launcher = new SparkLauncher
    val sparkLauncher =launcher
      .setAppName(flow.getFlowName())
      .setMaster(PropertyUtil.getPropertyValue("spark.master"))
      //.setDeployMode(PropertyUtil.getPropertyValue("spark.deploy.mode"))
      .setAppResource(ConfigureUtil.getPiFlowBundlePath())
      .setVerbose(true)
      .setConf("spark.driver.memory", flow.getDriverMemory())
      .setConf("spark.executor.instances", flow.getExecutorNum())
      .setConf("spark.executor.memory", flow.getExecutorMem())
      .setConf("spark.executor.cores",flow.getExecutorCores())
      .addFile(PropertyUtil.getConfigureFile())
      .addFile(ServerIpUtil.getServerIpFile())
      .setMainClass("cn.piflow.api.StartFlowMain")
      .addAppArgs(flowJsonencryptAES)

    val sparkMaster = PropertyUtil.getPropertyValue("spark.master")
    if(sparkMaster.equals("yarn")){
      sparkLauncher.setDeployMode(PropertyUtil.getPropertyValue("spark.deploy.mode"))
      sparkLauncher.setConf("spark.hadoop.yarn.resourcemanager.hostname", PropertyUtil.getPropertyValue("yarn.resourcemanager.hostname"))
    }

    //add other jars for application
    val classPath = PropertyUtil.getClassPath()
    val classPathFile = new File(classPath)
    if(classPathFile.exists()){
      FileUtil.getJarFile(new File(classPath)).foreach(f => {
        println(f.getPath)
        sparkLauncher.addJar(f.getPath)
      })
    }

    val scalaPath = PropertyUtil.getScalaPath()
    val scalaPathFile = new File(scalaPath)
    if(scalaPathFile.exists()){
      FileUtil.getJarFile(new File(scalaPath)).foreach(f => {
        println(f.getPath)
        sparkLauncher.addJar(f.getPath)
      })
    }

    sparkLauncher
  }

  def stop(appID: String) = {

    println("Stop Flow !!!!!!!!!!!!!!!!!!!!!!!!!!")
    //yarn application kill appId
    val url = ConfigureUtil.getYarnResourceManagerWebAppAddress() + appID + "/state"
    val client = HttpClients.createDefault()
    val put:HttpPut = new HttpPut(url)
    val body ="{\"state\":\"KILLED\"}"
    put.addHeader("Content-Type", "application/json")
    put.setEntity(new StringEntity(body))
    val response:CloseableHttpResponse = client.execute(put)
    val entity = response.getEntity
    val str = EntityUtils.toString(entity,"UTF-8")

    //update db
    println("Update flow state after Stop Flow !!!!!!!!!!!!!!!!!!!!!!!!!!")
    H2Util.updateFlowState(appID, FlowState.KILLED)
    H2Util.updateFlowFinishedTime(appID, new Date().toString)


    "ok"
  }

} 
Example 18
Source File: CORSFilter.scala    From daf-semantics   with Apache License 2.0
package it.almawave.kb.http.providers

import javax.ws.rs.container.ContainerRequestContext
import javax.ws.rs.container.ContainerResponseContext
import javax.ws.rs.container.ContainerResponseFilter
import javax.ws.rs.ext.Provider
import java.text.SimpleDateFormat
import java.util.Date
import java.net.URI

@Provider
class CORSFilter extends ContainerResponseFilter {

  override def filter(request: ContainerRequestContext, response: ContainerResponseContext) {

    val headers = response.getHeaders()

    headers.add("Access-Control-Allow-Origin", "*")
    headers.add("Access-Control-Allow-Headers", "origin, content-type, accept, authorization")
    headers.add("Access-Control-Allow-Credentials", "true")
    headers.add("Access-Control-Allow-Methods", "GET, POST, PUT, DELETE, OPTIONS, HEAD")

    // custom headers
    //    headers.add("Server", "Simple Jersey/Jetty HTTP server for RDF")
    //    headers.add("Pragma", "Pragma: no-cache")
    //    headers.add("Link", new URI("http://almawave.it"))

  }

} 
Example 19
Source File: AmazonS3Extensions.scala    From gfc-aws-s3   with Apache License 2.0
package com.gilt.gfc.aws.s3.akka

import java.util.Date

import com.amazonaws.services.s3.AmazonS3
import com.amazonaws.services.s3.model.{S3Object, S3ObjectSummary}

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object AmazonS3Extensions {

  implicit class S3Extensions(val amazonS3: AmazonS3) extends AnyVal {

    import scala.concurrent.blocking

    def mostRecentObject(bucketName: String, prefix: String): Future[Option[S3Object]] = {
      Future {
        mostRecentObjectSummary(bucketName, prefix)
      }.map { objectSummaryOpt =>
        objectSummaryOpt.map { summary =>
          val key = summary.getKey
          amazonS3.getObject(bucketName, key)
        }
      }
    }

    private def mostRecentObjectSummary(bucketName: String, prefix: String): Option[S3ObjectSummary] = {
      import scala.collection.JavaConversions._
      blocking {
        amazonS3.listObjects(bucketName, prefix).getObjectSummaries.toList
      }.sortBy(_.getLastModified)(Ordering[Date].reverse).headOption
    }
  }

} 
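The sortBy(_.getLastModified)(Ordering[Date].reverse) call above works because java.util.Date implements Comparable[Date], so Scala derives an implicit Ordering for it. A tiny standalone illustration of the same idea:

import java.util.Date

object LatestByDate {
  def main(args: Array[String]): Unit = {
    val dates = List(new Date(3000L), new Date(1000L), new Date(2000L))

    // Newest first, using the Ordering derived from Comparable[Date]
    val newestFirst = dates.sorted(Ordering[Date].reverse)
    println(newestFirst.headOption.map(_.getTime))   // Some(3000)

    // Equivalent shortcut for "most recent element"
    println(dates.max.getTime)                       // 3000
  }
}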
Example 20
Source File: SqlDataMapper.scala    From spark-riak-connector   with Apache License 2.0
package com.basho.riak.spark.writer.mapper

import java.util.{Calendar, Date}
import com.basho.riak.client.core.query.timeseries.ColumnDescription.ColumnType
import com.basho.riak.client.core.query.timeseries.{Cell, ColumnDescription, Row => RiakRow}
import com.basho.riak.spark.rdd.BucketDef
import com.basho.riak.spark.writer.ts.RowDef
import com.basho.riak.spark.writer.{WriteDataMapper, WriteDataMapperFactory}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row => SparkRow}
import com.basho.riak.spark.util.TSConversionUtil._


class SqlDataMapper[T <: SparkRow] extends WriteDataMapper[T, RowDef] {

  override def mapValue(row: T): RowDef = {
    Option(row.schema) match {
      case None => RowDef(createRowByType(row), None)
      case Some(schema) =>
        val (riakRow, columnsDef) = createRiakRowBySchema(row, schema)
        RowDef(riakRow, columnsDef)
    }
  }
}

object SqlDataMapper {
  def factory[T <: SparkRow]: WriteDataMapperFactory[T, RowDef] =
    new WriteDataMapperFactory[T, RowDef] {
      override def dataMapper(bucket: BucketDef): WriteDataMapper[T, RowDef] =
        new SqlDataMapper[T]
    }
} 
Example 21
Source File: DateTimeBuilder.scala    From nyaya   with GNU Lesser General Public License v2.1
package nyaya.gen

import java.util.Date
import scala.concurrent.duration.FiniteDuration
import DateTimeBuilder._
import Gen.Now

object DateTimeBuilder {

  sealed abstract class TimeSpec {
    final type Spec = Either[Long, Long => Long]
    protected def fixed(l: Long): Spec = Left(l)
    protected def fn(f: Long => Long): Spec = Right(f)
    val past, future: Spec
  }
  case class Delta(ms: Long) extends TimeSpec {
    override val past = fn(_ - ms)
    override val future = fn(_ + ms)
  }
  case class Fixed(epochMs: Long) extends TimeSpec {
    override val past = fixed(epochMs)
    override val future = past
  }
  case object Unlimited extends TimeSpec {
    override val past = fixed(0)
    override val future = fixed(Long.MaxValue - 1)
  }

  def default(implicit genNow: Gen[Now]): DateTimeBuilder =
    new DateTimeBuilder(genNow, Unlimited, Unlimited)

  val DayMs = 86400000L.toDouble
  val YearMs = DayMs * 365.25
  val MonthMs = YearMs / 12
  val WeekMs = YearMs / 52
}

final class DateTimeBuilder(genNow: Gen[Now], past: TimeSpec, future: TimeSpec) extends DateTimeBuilderJava8 {

  protected def copy(genNow: Gen[Now] = genNow, past: TimeSpec = past, future: TimeSpec = future): DateTimeBuilder =
    new DateTimeBuilder(genNow, past = past, future = future)

  def fromEpochMs(e: Long)            = copy(past = Fixed(e))
  def fromNowMinusMs(d: Long)         = copy(past = Delta(d))
  def fromDate(d: Date)               = fromEpochMs(d.getTime)
  def fromNow                         = fromNowMinusMs(0)
  def fromNowMinus(d: FiniteDuration) = fromNowMinusMs(d.toMillis)
  def fromNowMinusYears(d: Double)    = fromNowMinusMs((YearMs * d).toLong)
  def fromNowMinusMonths(d: Double)   = fromNowMinusMs((MonthMs * d).toLong)
  def fromNowMinusWeeks(d: Double)    = fromNowMinusMs((WeekMs * d).toLong)
  def fromNowMinusDays(d: Double)     = fromNowMinusMs((DayMs * d).toLong)

  def untilEpochMs(e: Long)           = copy(future = Fixed(e))
  def untilNowPlusMs(d: Long)         = copy(future = Delta(d))
  def untilDate(d: Date)              = untilEpochMs(d.getTime)
  def untilNow                        = untilNowPlusMs(0)
  def untilNowPlus(d: FiniteDuration) = untilNowPlusMs(d.toMillis)
  def untilNowPlusYears(d: Double)    = untilNowPlusMs((YearMs * d).toLong)
  def untilNowPlusMonths(d: Double)   = untilNowPlusMs((MonthMs * d).toLong)
  def untilNowPlusWeeks(d: Double)    = untilNowPlusMs((WeekMs * d).toLong)
  def untilNowPlusDays(d: Double)     = untilNowPlusMs((DayMs * d).toLong)

  def aroundNow(d: FiniteDuration) = fromNowMinus(d).untilNowPlus(d)
  def aroundNowMs(d: Long)         = fromNowMinusMs(d).untilNowPlusMs(d)
  def aroundNowDays(d: Double)     = fromNowMinusDays(d).untilNowPlusDays(d)
  def aroundNowMonths(d: Double)   = fromNowMinusMonths(d).untilNowPlusMonths(d)
  def aroundNowWeeks(d: Double)    = fromNowMinusWeeks(d).untilNowPlusWeeks(d)
  def aroundNowYears(d: Double)    = fromNowMinusYears(d).untilNowPlusYears(d)

  def withNowGen(g: Gen[Now]): DateTimeBuilder =
    copy(genNow = g)

  def withNow(now: => Now): DateTimeBuilder =
    withNowGen(Gen point now)

  def withNowMs(nowMs: => Long): DateTimeBuilder =
    withNowGen(Gen point Now(nowMs))

  
  def withNowLive: DateTimeBuilder =
    withNowGen(Now.genNowByName)

  // ===================================================================================================================

  lazy val asEpochMs: Gen[Long] = {
    def specToFn(s: TimeSpec#Spec): Long => Long = s.fold(Function const, identity)
    (past.past, future.future) match {
      case (Left(a), Left(b)) =>
        Gen.chooseLong(a, b)
      case (x, y) =>
        val a = specToFn(x)
        val b = specToFn(y)
        genNow.flatMap(now => Gen.chooseLong(a(now.millisSinceEpoch), b(now.millisSinceEpoch)))
    }
  }

  def asDate: Gen[Date] =
    asEpochMs.map(new Date(_))
} 
Example 22
Source File: EachRunNewFileAppender.scala    From sddf   with GNU General Public License v3.0
package de.unihamburg.vsis.sddf.logging

import java.io.File
import java.text.SimpleDateFormat
import java.util.Date

import org.apache.log4j.FileAppender

import de.unihamburg.vsis.sddf.config.Config

class EachRunNewFileAppender extends FileAppender {
  override def setFile(fileName: String, append: Boolean, bufferedIO: Boolean, bufferSize: Int) = {
    val oldFile = new File(fileName)
    val dir = if (oldFile.isDirectory()) oldFile else oldFile.getParentFile
    val fileSuffix = if (oldFile.isDirectory()) ".log" else oldFile.getName
    val newFileName = EachRunNewFileAppender.runUuid + fileSuffix
    val newLogFile = new File(dir, newFileName)
    super.setFile(newLogFile.getPath, append, bufferedIO, bufferSize)
  }
}

object EachRunNewFileAppender {
  val dateFormat = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss");
  val runUuid = System.currentTimeMillis().toString() + "-" + dateFormat.format(new Date())
} 
Example 23
Source File: Authentication.scala    From daf   with BSD 3-Clause "New" or "Revised" License
package it.gov.daf.common.authentication

import java.util.Date

import com.nimbusds.jwt.JWTClaimsSet
import org.pac4j.core.profile.{CommonProfile, ProfileManager}
import org.pac4j.jwt.config.signature.SecretSignatureConfiguration
import org.pac4j.jwt.credentials.authenticator.JwtAuthenticator
import org.pac4j.jwt.profile.JwtGenerator
import org.pac4j.play.PlayWebContext
import org.pac4j.play.store.PlaySessionStore
import play.api.Configuration
import play.api.mvc.{RequestHeader, Result, Results}

import scala.collection.convert.decorateAsScala._
import scala.collection.mutable

@SuppressWarnings(
  Array(
    "org.wartremover.warts.Throw",
    "org.wartremover.warts.Var"
  )
)
object Authentication extends Results {

  var configuration: Option[Configuration] = None
  var playSessionStore: Option[PlaySessionStore] = None
  var secret: Option[String] = None

  def apply(configuration: Configuration, playSessionStore: PlaySessionStore): Unit = {
    this.configuration = Some(configuration)
    this.playSessionStore = Some(playSessionStore)
    this.secret = this.configuration.flatMap(_.getString("pac4j.jwt_secret"))
  }

  def getClaims(requestHeader: RequestHeader): Option[mutable.Map[String, AnyRef]] = {

    val header: Option[String] = requestHeader.headers.get("Authorization")
    val token: Option[String] = for {
      h <- header
      t <- h.split("Bearer").lastOption
    } yield t.trim

    getClaimsFromToken(token)
  }

  def getClaimsFromToken(token: Option[String]): Option[mutable.Map[String, AnyRef]] = {
    val jwtAuthenticator = new JwtAuthenticator()
    jwtAuthenticator.addSignatureConfiguration(new SecretSignatureConfiguration(secret.getOrElse(throw new Exception("missing secret"))))
    token.map(jwtAuthenticator.validateTokenAndGetClaims(_).asScala)
  }

  def getProfiles(request: RequestHeader): List[CommonProfile] = {
    val webContext = new PlayWebContext(request, playSessionStore.getOrElse(throw new Exception("missing playSessionStore")))
    val profileManager = new ProfileManager[CommonProfile](webContext)
    profileManager.getAll(true).asScala.toList
  }

  def getStringToken: (RequestHeader,Long) => Option[String] = (request: RequestHeader,minutes:Long)  => {
    val generator = new JwtGenerator[CommonProfile](new SecretSignatureConfiguration(secret.getOrElse(throw new Exception("missing secret"))))
    val profiles = getProfiles(request)
    val token: Option[String] = profiles.headOption.map(profile => {
      val expDate = new Date( (new Date).getTime + 1000L*60L*minutes )//*60L*24L
      val claims = new JWTClaimsSet.Builder().expirationTime(expDate).build()
      profile.addAttributes(claims.getClaims)
      generator.generate(profile)
    })
    token
  }

  def getToken: (RequestHeader,Long) => Result = (request: RequestHeader, minutes:Long) => {
    Ok(getStringToken(request,minutes).getOrElse(""))
  }

} 
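The only java.util.Date work in Example 23 is building the token's expiration instant: new Date((new Date).getTime + 1000L*60L*minutes), i.e. "now plus N minutes", which is then handed to JWTClaimsSet.Builder().expirationTime(...). Factored out, the computation is just this (expiresIn is our illustrative helper name):

import java.util.Date

object TokenExpiry {
  // "now + minutes" as a java.util.Date
  def expiresIn(minutes: Long, nowMs: Long = System.currentTimeMillis()): Date =
    new Date(nowMs + 1000L * 60L * minutes)

  def main(args: Array[String]): Unit = {
    val exp = expiresIn(30)
    println(exp.after(new Date()))  // true: the expiry lies in the future
  }
}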
Example 24
Source File: Collection_7_imm_Tuples.scala    From HadoopLearning   with MIT License
package com.c503.scala

import java.util.Date


object Collection_7_imm_Tuples {

  def main(args: Array[String]): Unit = {

    val tuples_1 = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22)
    val tuples_2 = Tuple3[Int, String, Int](1, "dd", 2)
    val tuples_3 = (1, 2, 3)
    val tuples_4 = Tuple3(1, "hello", Console)

    println(tuples_1)
    println(tuples_2)
    println(tuples_3)
    println(tuples_4)

    println("*" * 40)

    val tupleDefault = ("Scala", "Scala", "Spark", 120, 12, 34, new Date())
    println(tupleDefault._1)
    println(tupleDefault._2)
    println(tupleDefault._3)
    println(tupleDefault._4)
    println(tupleDefault._5)
    println(tupleDefault._6)
    println(tupleDefault._7)

    println("*" * 40)

    val t = Tuple2("Scala", "Hello")
    println("Tuple:" + t)
    println("swapped Tuple:" + t.swap)

    println("*" * 40)

    val t1 = (4, 3, 2, 1)
    t1.productIterator.foreach(println)

    val arrkey = Array(1, 3, 5)
    val arrValue = Array("a", "b", "c", "d")
    val tupleArr = arrkey.zipAll(arrValue, 1, 2)
    val map = tupleArr.toMap
    println(map)
    println(tupleArr.getClass.getName)

    println("*" * 40)

    val tuples_A_1 = ("A")
    val tuples_A_2 = Tuple1[String]("A")
    val tuples_A_3 = Tuple1[Int](1)
    val tuple_B = ("A", "B")
    val tuple_C = ("A", "B", "C")
    val tuple_D = ("A", "B", "C", "D")
    println(tuples_A_1.getClass.getSimpleName)
    println(tuples_A_2.getClass.getSimpleName)
    println(tuples_A_3.getClass.getSimpleName)
    println(tuple_B.getClass.getSimpleName)
    println(tuple_C.getClass.getSimpleName)
    println(tuple_D.getClass.getSimpleName)

  }

} 
Example 25
Source File: CommonEntranceParser.scala    From Linkis   with Apache License 2.0
package com.webank.wedatasphere.linkis.entrance.parser

import java.util
import java.util.Date

import com.webank.wedatasphere.linkis.entrance.conf.EntranceConfiguration
import com.webank.wedatasphere.linkis.entrance.exception.EntranceIllegalParamException
import com.webank.wedatasphere.linkis.protocol.constants.TaskConstant
import com.webank.wedatasphere.linkis.protocol.query.RequestPersistTask
import com.webank.wedatasphere.linkis.protocol.task.Task
import com.webank.wedatasphere.linkis.rpc.Sender
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEventState
import org.apache.commons.lang.StringUtils
import org.slf4j.LoggerFactory


    if (EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue.equals(creator) && StringUtils.isEmpty(source.get(TaskConstant.SCRIPTPATH)) &&
      StringUtils.isEmpty(executionCode))
      throw new EntranceIllegalParamException(20007, "param executionCode and scriptPath can not be empty at the same time")
    var runType:String = null
    if (StringUtils.isNotEmpty(executionCode)) {
      runType = params.get(TaskConstant.RUNTYPE).asInstanceOf[String]
      if (StringUtils.isEmpty(runType)) runType = EntranceConfiguration.DEFAULT_RUN_TYPE.getValue
      //If formatCode is not empty, we need to format it
      if (formatCode) executionCode = format(executionCode)
      task.setExecutionCode(executionCode)
    }
    task.setSource(source)
    task.setEngineType(runType)
    //For code compatibility, keep engineType and runType set to the same value
    task.setRunType(runType)
    task.setExecuteApplicationName(executeApplicationName)
    task.setRequestApplicationName(creator)
    task.setStatus(SchedulerEventState.Inited.toString)
    task
  }
  //todo to format code using proper way
  private def format(code:String):String = code

} 
Example 26
Source File: CSTableRegister.scala    From Linkis   with Apache License 2.0
package com.webank.wedatasphere.linkis.engine.cs

import java.util.Date

import com.webank.wedatasphere.linkis.common.io.resultset.ResultSetWriter
import com.webank.wedatasphere.linkis.common.io.{MetaData, Record}
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.cs.client.service.CSTableService
import com.webank.wedatasphere.linkis.cs.client.utils.{ContextServiceUtils, SerializeHelper}
import com.webank.wedatasphere.linkis.cs.common.entity.enumeration.{ContextScope, ContextType}
import com.webank.wedatasphere.linkis.cs.common.entity.metadata.{CSColumn, CSTable}
import com.webank.wedatasphere.linkis.cs.common.entity.source.CommonContextKey
import com.webank.wedatasphere.linkis.cs.common.utils.CSCommonUtils
import com.webank.wedatasphere.linkis.engine.execute.EngineExecutorContext
import com.webank.wedatasphere.linkis.storage.domain.Column
import com.webank.wedatasphere.linkis.storage.utils.StorageUtils
import org.apache.commons.lang.StringUtils


object CSTableRegister extends Logging{

  def registerTempTable(engineExecutorContext: EngineExecutorContext,
                        writer: ResultSetWriter[_ <: MetaData, _ <: Record], alias: String, columns: Array[Column]): Unit = {

    val contextIDValueStr = ContextServiceUtils.getContextIDStrByMap(engineExecutorContext.getProperties)
    val nodeNameStr = ContextServiceUtils.getNodeNameStrByMap(engineExecutorContext.getProperties)

    if (StringUtils.isNotBlank(contextIDValueStr) && StringUtils.isNotBlank(nodeNameStr)) {
      info(s"Start to register TempTable nodeName:$nodeNameStr")
      writer.flush()
      val tableName = if (StringUtils.isNotBlank(alias)) s"${CSCommonUtils.CS_TMP_TABLE_PREFIX}${nodeNameStr}_${alias}" else {
        var i = 1;
        var rsName: String = null;
        while (StringUtils.isEmpty(rsName)) {
          val tmpTable = s"${CSCommonUtils.CS_TMP_TABLE_PREFIX}${nodeNameStr}_rs${i}"
          i = i + 1
          val contextKey = new CommonContextKey
          contextKey.setContextScope(ContextScope.FRIENDLY)
          contextKey.setContextType(ContextType.METADATA)
          contextKey.setKey(CSCommonUtils.getTableKey(nodeNameStr, tmpTable))
          val table = CSTableService.getInstance().getCSTable(contextIDValueStr, SerializeHelper.serializeContextKey(contextKey))
          if (null == table) {
            rsName = tmpTable
          }
        }
        rsName
      }
      val csTable = new CSTable
      csTable.setName(tableName)
      csTable.setAlias(alias)
      csTable.setAvailable(true)
      csTable.setComment("cs temp table")
      csTable.setCreateTime(new Date())
      csTable.setCreator(StorageUtils.getJvmUser)
      csTable.setExternalUse(true)
      csTable.setImport(false)
      csTable.setLocation(writer.toString)
      csTable.setPartitionTable(false)
      csTable.setView(true)
      val csColumns = columns.map { column =>
        val csColumn = new CSColumn
        csColumn.setName(column.columnName)
        csColumn.setType(column.dataType.typeName)
        csColumn.setComment(column.comment)
        csColumn
      }
      csTable.setColumns(csColumns)
      val contextKey = new CommonContextKey
      contextKey.setContextScope(ContextScope.PUBLIC)
      contextKey.setContextType(ContextType.METADATA)
      contextKey.setKey(CSCommonUtils.getTableKey(nodeNameStr, tableName))
      CSTableService.getInstance().putCSTable(contextIDValueStr, SerializeHelper.serializeContextKey(contextKey), csTable)
      info(s"Finished to register TempTable nodeName:$nodeNameStr")
    }
  }
} 
Example 27
Source File: DataWorkCloudEngineApplication.scala    From Linkis   with Apache License 2.0
package com.webank.wedatasphere.linkis.engine

import java.text.SimpleDateFormat
import java.util.Date

import com.webank.wedatasphere.linkis.DataWorkCloudApplication
import com.webank.wedatasphere.linkis.common.conf.DWCArgumentsParser
import com.webank.wedatasphere.linkis.common.utils.Utils
import com.webank.wedatasphere.linkis.engine.conf.EngineConfiguration
import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration
import org.apache.commons.lang.StringUtils
import org.slf4j.LoggerFactory


object DataWorkCloudEngineApplication {

  val userName:String = System.getProperty("user.name")
  val hostName:String = Utils.getComputerName
  val appName:String = EngineConfiguration.ENGINE_SPRING_APPLICATION_NAME.getValue
  val prefixName:String = EngineConfiguration.ENGINE_LOG_PREFIX.getValue
  val timeStamp:Long = System.currentTimeMillis()
  private val timeFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss")
  private val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
  val time:String = timeFormat.format(new Date(timeStamp))
  val date:String = dateFormat.format(new Date(timeStamp))

  val isTimeStampSuffix:Boolean = "true".equalsIgnoreCase(EngineConfiguration.ENGINE_LOG_TIME_STAMP_SUFFIX.getValue)
  val shortLogFile:String =
    if (isTimeStampSuffix) appName + "_" + hostName + "_" + userName + "_"  + time + ".log"
    else appName + "_" + hostName + "_" + userName + ".log"
  val logName:String =
    if(isTimeStampSuffix) prefixName + "/" + userName + "/" + shortLogFile
    else prefixName + "/" + shortLogFile
  System.setProperty("engineLogFile", logName)
  System.setProperty("shortEngineLogFile", shortLogFile)
//  System.setProperty("engineLogFile", logName)
//  val context:LoggerContext = LogManager.getContext(false).asInstanceOf[LoggerContext]
//  val path:String = getClass.getResource("/").getPath
//  val log4j2XMLFile:File = new File(path + "/log4j2-engine.xml")
//  val configUri:URI = log4j2XMLFile.toURI
//  context.setConfigLocation(configUri)
  private val logger = LoggerFactory.getLogger(getClass)
  logger.info(s"Now log4j2 Rolling File is set to be $logName")
  logger.info(s"Now shortLogFile is set to be $shortLogFile")
  def main(args: Array[String]): Unit = {
    val parser = DWCArgumentsParser.parse(args)
    DWCArgumentsParser.setDWCOptionMap(parser.getDWCConfMap)
    val existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.getValue
    if(StringUtils.isEmpty(existsExcludePackages))
      DataWorkCloudApplication.setProperty(ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.key, "com.webank.wedatasphere.linkis.enginemanager")
    else
      DataWorkCloudApplication.setProperty(ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.key, existsExcludePackages + ",com.webank.wedatasphere.linkis.enginemanager")
    DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(parser.getSpringConfMap))
  }
} 
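The log-file naming above reduces to a plain SimpleDateFormat call over the current time. A standalone sketch with hypothetical app/host/user values (only the Date handling mirrors the example):

import java.text.SimpleDateFormat
import java.util.Date

object LogNameSketch {
  def main(args: Array[String]): Unit = {
    val timeStamp = System.currentTimeMillis()
    // Same pattern as above: a timestamp suffix embedded in the log file name.
    val time = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss").format(new Date(timeStamp))
    val shortLogFile = s"myApp_myHost_myUser_$time.log"
    println(shortLogFile)
  }
}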
Example 28
Source File: JobInfoResult.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
import java.util
import java.util.Date

import com.webank.wedatasphere.linkis.common.utils.Utils
import org.apache.commons.beanutils.BeanUtils
// (package declaration and the remaining Linkis-specific imports are omitted in the original snippet)

@DWSHttpMessageResult("/api/rest_j/v\\d+/jobhistory/\\S+/get")
class JobInfoResult extends DWSResult with UserAction with Status {

  private var task: java.util.Map[_, _] = _
  private var requestPersistTask: RequestPersistTask = _
  private var resultSetList: Array[String] = _

  def setTask(task: util.Map[_, _]): Unit = {
    this.task = task
    requestPersistTask = new RequestPersistTask
    val createdTime = task.get("createdTime").asInstanceOf[Long]
    val updatedTime = task.get("updatedTime").asInstanceOf[Long]
    task.remove("createdTime")
    task.remove("updatedTime")
    task.remove("engineStartTime")
    Utils.tryCatch{
      BeanUtils.populate(requestPersistTask, task.asInstanceOf[util.Map[String, _]])
    }{
      case e:Exception => error("copy failed", e)
    }
    requestPersistTask.setStatus(task.get("status").asInstanceOf[String])
    requestPersistTask.setCreatedTime(new Date(createdTime))
    requestPersistTask.setUpdatedTime(new Date(updatedTime))
    requestPersistTask.setEngineStartTime(new Date(updatedTime))
  }

  def getTask = task

  def getRequestPersistTask: RequestPersistTask = requestPersistTask

  def getResultSetList(ujesClient: UJESClient): Array[String] = {
    if(isSucceed && resultSetList == null) synchronized {
      if(resultSetList != null) return resultSetList
      resultSetList = ujesClient.executeUJESJob(ResultSetListAction.builder().set(this).build()) match {
        case resultSetList: ResultSetListResult => resultSetList.getResultSetList
      }
      resultSetList
    } else if(resultSetList != null) resultSetList
    else if(isFailed) throw new UJESJobException(requestPersistTask.getErrCode, requestPersistTask.getErrDesc)
    else throw new UJESJobException(s"job ${requestPersistTask.getTaskID} is still executing with state ${requestPersistTask.getStatus}.")
  }

  override def getJobStatus: String = requestPersistTask.getStatus
} 
Example 29
Source File: ApplicationUtil.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.application.util

import java.text.SimpleDateFormat
import java.util.Date

import com.webank.wedatasphere.linkis.application.conf.{ApplicationConfiguration, ApplicationScalaConfiguration}


object ApplicationUtil {
  def getFlowsJson(user:String,date:Date):String ={
    val initExamplePath = ApplicationScalaConfiguration.INIT_EXAMPLE_PATH.getValue.toString + user + "/application/dataStudio/"
    val sqlName = ApplicationScalaConfiguration.INIT_EXAMPLE_SQL_NAME.getValue.toString
    val scalaName = ApplicationScalaConfiguration.INIT_EXAMPLE_SCALA_NAME.getValue.toString
    val spyName = ApplicationScalaConfiguration.INIT_EXAMPLE_SPY_NAME.getValue.toString
    val hqlName = ApplicationScalaConfiguration.INIT_EXAMPLE_HQL_NAME.getValue.toString
    val pythonName = ApplicationScalaConfiguration.INIT_EXAMPLE_PYTHON_NAME.getValue.toString
    val formateDate =  new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(date)
    s"""[{"id":1,"name":"Default business process(默认业务流程)","createTime":"$formateDate","lastUpdateTime":"","description":"Default business process(默认业务流程)","version":"1.0.0","owner":"$user","canPublished":false,"params":{},"relations":[],"projectChildren":[],"flowChildren":[],"nodeChildren":{"dataExchange":[],"dataStudio":[{"id":1,"name":"$sqlName","type":"${sqlName.substring(sqlName.lastIndexOf(".")+1)}","createTime":"$formateDate","lastUpdateTime":"","description":"","version":"1.0.0","owner":"$user","content":{"scriptPath":"${initExamplePath + sqlName}"}},{"id":2,"name":"$scalaName","type":"${scalaName.substring(scalaName.lastIndexOf(".")+1)}","createTime":"$formateDate","lastUpdateTime":"","description":"","version":"1.0.0","owner":"$user","content":{"scriptPath":"${initExamplePath + scalaName}"}},{"id":3,"name":"$spyName","type":"${spyName.substring(spyName.lastIndexOf(".")+1)}","createTime":"$formateDate","lastUpdateTime":"","description":"","version":"1.0.0","owner":"$user","content":{"scriptPath":"${initExamplePath + spyName}"}},{"id":4,"name":"$hqlName","type":"${hqlName.substring(hqlName.lastIndexOf(".")+1)}","createTime":"$formateDate","lastUpdateTime":"","description":"","version":"1.0.0","owner":"$user","content":{"scriptPath":"${initExamplePath + hqlName}"}},{"id":5,"name":"$pythonName","type":"${pythonName.substring(pythonName.lastIndexOf(".")+1)}","createTime":"$formateDate","lastUpdateTime":"","description":"","version":"1.0.0","owner":"$user","content":{"scriptPath":"${initExamplePath + pythonName}"}}],"dataBI":[],"resources":[]}}]"""
  }
} 
Example 30
Source File: QueryUtils.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.jobhistory.util

import java.io.{InputStream, OutputStream}
import java.util.Date

import com.webank.wedatasphere.linkis.common.conf.CommonVars
import com.webank.wedatasphere.linkis.common.io.FsPath
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.jobhistory.entity.QueryTask
import com.webank.wedatasphere.linkis.protocol.query.RequestInsertTask
import com.webank.wedatasphere.linkis.storage.FSFactory
import com.webank.wedatasphere.linkis.storage.fs.FileSystem
import com.webank.wedatasphere.linkis.storage.utils.{FileSystemUtils, StorageUtils}
import org.apache.commons.io.IOUtils
import org.apache.commons.lang.time.DateFormatUtils


object QueryUtils extends Logging {

  private val CODE_STORE_PREFIX = CommonVars("bdp.dataworkcloud.query.store.prefix", "hdfs:///tmp/bdp-ide/")
  private val CODE_STORE_SUFFIX = CommonVars("bdp.dataworkcloud.query.store.suffix", "")
  private val CHARSET = "utf-8"
  private val CODE_SPLIT = ";"
  private val LENGTH_SPLIT = "#"

  def storeExecutionCode(requestInsertTask: RequestInsertTask): Unit = {
    if (requestInsertTask.getExecutionCode.length < 60000) return
    val user: String = requestInsertTask.getUmUser
    val path: String = getCodeStorePath(user)
    val fsPath: FsPath = new FsPath(path)
    val fileSystem = FSFactory.getFsByProxyUser(fsPath, user).asInstanceOf[FileSystem]
    fileSystem.init(null)
    var os: OutputStream = null
    var position = 0L
    val codeBytes = requestInsertTask.getExecutionCode.getBytes(CHARSET)
    path.intern() synchronized {
      Utils.tryFinally {
        if (!fileSystem.exists(fsPath)) FileSystemUtils.createNewFile(fsPath, user, true)
        os = fileSystem.write(fsPath, false)
        position = fileSystem.get(path).getLength
        IOUtils.write(codeBytes, os)
      } {
        IOUtils.closeQuietly(os)
        if (fileSystem != null) fileSystem.close()
      }
    }
    val length = codeBytes.length
    requestInsertTask.setExecutionCode(path + CODE_SPLIT + position + LENGTH_SPLIT + length)
  }

  def exchangeExecutionCode(queryTask: QueryTask): Unit = {
    import scala.util.control.Breaks._
    if (queryTask.getExecutionCode == null || !queryTask.getExecutionCode.startsWith(StorageUtils.HDFS_SCHEMA)) return
    val codePath = queryTask.getExecutionCode
    val path = codePath.substring(0, codePath.lastIndexOf(CODE_SPLIT))
    val codeInfo = codePath.substring(codePath.lastIndexOf(CODE_SPLIT) + 1)
    val infos: Array[String] = codeInfo.split(LENGTH_SPLIT)
    val position = infos(0).toInt
    var lengthLeft = infos(1).toInt
    val tub = new Array[Byte](1024)
    val executionCode: StringBuilder = new StringBuilder
    val fsPath: FsPath = new FsPath(path)
    val fileSystem = FSFactory.getFsByProxyUser(fsPath, queryTask.getUmUser).asInstanceOf[FileSystem]
    fileSystem.init(null)
    var is: InputStream = null
    if (!fileSystem.exists(fsPath)) return
    Utils.tryFinally {
      is = fileSystem.read(fsPath)
      if (position > 0) is.skip(position)
      breakable {
        while (lengthLeft > 0) {
          val bytesRead = is.read(tub)
          val useful = Math.min(bytesRead, lengthLeft)
          if (useful < 0) break()
          lengthLeft -= useful
          executionCode.append(new String(tub, 0, useful, CHARSET))
        }
      }
    } {
      if (fileSystem != null) fileSystem.close()
      IOUtils.closeQuietly(is)
    }
    queryTask.setExecutionCode(executionCode.toString())
  }

  private def getCodeStorePath(user: String): String = {
    val date: String = DateFormatUtils.format(new Date, "yyyyMMdd")
    s"${CODE_STORE_PREFIX.getValue}${user}${CODE_STORE_SUFFIX.getValue}/executionCode/${date}/_scripts"
  }
} 
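getCodeStorePath builds its date segment with commons-lang's DateFormatUtils, which (unlike SimpleDateFormat) is thread-safe. A standalone sketch of that call, assuming commons-lang 2.x on the classpath and a hypothetical user name:

import java.util.Date
import org.apache.commons.lang.time.DateFormatUtils

object CodeStorePathSketch {
  def main(args: Array[String]): Unit = {
    val date = DateFormatUtils.format(new Date, "yyyyMMdd")
    // Illustrative path only; prefix, user and suffix come from configuration in the example above.
    println(s"hdfs:///tmp/bdp-ide/hadoop/executionCode/$date/_scripts")
  }
}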
Example 31
Source File: CSTableResultSetWriter.scala    From Linkis   with Apache License 2.0 5 votes vote down vote up
package com.webank.wedatasphere.linkis.cs.storage

import java.util.Date

import com.webank.wedatasphere.linkis.common.io.FsPath
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.cs.client.service.CSTableService
import com.webank.wedatasphere.linkis.cs.common.entity.metadata.{CSColumn, CSTable}
import com.webank.wedatasphere.linkis.storage.resultset.StorageResultSetWriter
import com.webank.wedatasphere.linkis.storage.resultset.table.{TableMetaData, TableRecord, TableResultSet}
import com.webank.wedatasphere.linkis.storage.utils.StorageUtils
import org.apache.commons.lang.StringUtils


class CSTableResultSetWriter(tableResult: TableResultSet, maxCacheSize: Long,
                             storePath: FsPath, contextIDStr: String, nodeName: String, alias: String) extends StorageResultSetWriter[TableMetaData, TableRecord](tableResult, maxCacheSize, storePath) with Logging {

  override def toString: String = {
    try {
      registerToCS
    } catch {
      case t: Throwable =>
        info("Failed to register tmp table", t)
    }
    super.toString
  }

  private def registerToCS: Unit = {

    if (StringUtils.isNotBlank(contextIDStr) && StringUtils.isNotBlank(nodeName) && !isEmpty) {
      info("Starting to register resultSet to cs")
      flush()
      val csTable = new CSTable
      csTable.setAlias(alias)
      csTable.setAvailable(true)
      csTable.setComment("cs temp table")
      csTable.setCreateTime(new Date())
      csTable.setCreator(StorageUtils.getJvmUser)
      csTable.setExternalUse(true)
      csTable.setImport(false)
      csTable.setLocation(toFSPath.getSchemaPath)
      csTable.setPartitionTable(false)
      csTable.setView(true)
      val csColumns = getMetaData.asInstanceOf[TableMetaData].columns.map { column =>
        val csColumn = new CSColumn
        csColumn.setName(column.columnName)
        csColumn.setType(column.dataType.typeName)
        csColumn.setComment(column.comment)
        csColumn
      }
      csTable.setColumns(csColumns)
      CSTableService.getInstance().registerCSTable(contextIDStr, nodeName, alias, csTable)
      info("Finished registering resultSet to cs")
    }
  }
} 
Example 32
Source File: _06_02_PartiallyAppliedFunctions.scala    From LearningScala   with Apache License 2.0 5 votes vote down vote up
package _010_functions

import java.util.Date


object _06_02_PartiallyAppliedFunctions {

  def log(date: Date, message: String): Unit = {
    println(s"$date --> $message")
  }

  def main(args: Array[String]): Unit = {
    // Fully Applied Function
    val sum = (a: Int, b: Int, c: Int) => a + b + c
    println(s"sum(1, 2, 3): ${sum(1, 2, 3)}")
    println

    // Partially Applied Function!
    val f1 = sum(_: Int, 10, 20)
    val f2 = sum(10, _: Int, 20)
    val f3 = sum(10, 20, _: Int)
    val f4 = sum(10, _: Int, _: Int)
    val f5 = sum(_: Int, _: Int, 10)

    println(s"f1(30): ${f1(30)}")
    println(s"f2(30): ${f2(30)}")
    println(s"f3(30): ${f3(30)}")
    println(s"f4(30): ${f4(30, 40)}")
    println(s"f5(30): ${f5(30, 40)}")
    println

    val date = new Date()
    val newLog = log(date, _: String)

    newLog("Hi")
    newLog("Hello")
  }
} 
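For comparison, the same date-prefixed logger can be built from a curried method instead of the underscore syntax above. A small standalone sketch (not from the original project):

import java.util.Date

object CurriedLogSketch {
  def log(date: Date)(message: String): Unit = println(s"$date --> $message")

  def main(args: Array[String]): Unit = {
    // Partially apply the first parameter list; the result is a String => Unit.
    val newLog = log(new Date()) _
    newLog("Hi")
    newLog("Hello")
  }
}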
Example 33
Source File: Mailer.scala    From pulse   with Apache License 2.0 5 votes vote down vote up
package io.phdata.pulse.alertengine.notification

import javax.mail._
import java.util.{ Date, Properties }
import javax.mail.internet.{ InternetAddress, MimeMessage }
import com.typesafe.scalalogging.LazyLogging

class Mailer(smtpServer: String,
             port: Long = 25,
             username: String,
             password: Option[String],
             use_smtp_tls: Boolean)
    extends LazyLogging {
  private val props = new Properties()
  props.put("mail.smtp.host", smtpServer)
  props.put("mail.smtp.port", port.toString)
  if (use_smtp_tls) {
    props.put("mail.smtp.starttls.enable", "true")
  }

  def sendMail(addresses: List[String], subject: String, body: String): Unit = {
    val session = password.fold {
      logger.info("no password supplied, skipping authentication")
      props.put("mail.smtp.auth", "false")
      Session.getInstance(props)
    } { password =>
      logger.info("authenticating with password")
      val auth = new Authenticator {
        override def getPasswordAuthentication = new PasswordAuthentication(username, password)
      }
      props.put("mail.smtp.auth", "true")
      Session.getInstance(props, auth)
    }

    val message: Message = new MimeMessage(session)
    message.setFrom(new InternetAddress(username))
    message.setSentDate(new Date())
    addresses.foreach { a =>
      message.addRecipients(Message.RecipientType.TO,
                            InternetAddress.parse(a).asInstanceOf[Array[Address]])
    }
    message.setSubject(subject)
    message.setText(body)
    Transport.send(message)
  }
} 
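A hypothetical usage of the Mailer above; the SMTP host, addresses and credentials are placeholders, not values from the Pulse project:

object MailerSketch {
  def main(args: Array[String]): Unit = {
    val mailer = new Mailer(
      smtpServer = "smtp.example.com",
      port = 25,
      username = "alerts@example.com",
      password = None, // no password => the unauthenticated branch above
      use_smtp_tls = false)
    mailer.sendMail(List("ops@example.com"), "Pulse alert", "Something needs attention.")
  }
}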
Example 34
Source File: Authorize.scala    From keycloak-benchmark   with Apache License 2.0 5 votes vote down vote up
package io.gatling.keycloak

import java.text.SimpleDateFormat
import java.util.{Date, Collections}

import akka.actor.ActorDSL.actor
import akka.actor.ActorRef
import io.gatling.core.action.Interruptable
import io.gatling.core.action.builder.ActionBuilder
import io.gatling.core.config.Protocols
import io.gatling.core.result.writer.DataWriterClient
import io.gatling.core.session._
import io.gatling.core.validation._
import org.jboss.logging.Logger
import org.keycloak.adapters.spi.AuthOutcome
import org.keycloak.adapters.KeycloakDeploymentBuilder
import org.keycloak.adapters.spi.HttpFacade.Cookie
import org.keycloak.common.enums.SslRequired
import org.keycloak.representations.adapters.config.AdapterConfig

import scala.collection.JavaConverters._

case class AuthorizeAttributes(
  requestName: Expression[String],
  uri: Expression[String],
  cookies: Expression[List[Cookie]],
  sslRequired: SslRequired = SslRequired.EXTERNAL,
  resource: String = null,
  password: String = null,
  realm: String = null,
  realmKey: String = null,
  authServerUrl: Expression[String] = _ => Failure("no server url")
) {
  def toAdapterConfig(session: Session) = {
    val adapterConfig = new AdapterConfig
    adapterConfig.setSslRequired(sslRequired.toString)
    adapterConfig.setResource(resource)
    adapterConfig.setCredentials(Collections.singletonMap("secret", password))
    adapterConfig.setRealm(realm)
    adapterConfig.setRealmKey(realmKey)
    adapterConfig.setAuthServerUrl(authServerUrl(session).get)
    adapterConfig
  }
}

class AuthorizeActionBuilder(attributes: AuthorizeAttributes) extends ActionBuilder {
  def newInstance(attributes: AuthorizeAttributes) = new AuthorizeActionBuilder(attributes)

  def sslRequired(sslRequired: SslRequired) = newInstance(attributes.copy(sslRequired = sslRequired))
  def resource(resource: String) = newInstance(attributes.copy(resource = resource))
  def clientCredentials(password: String) = newInstance(attributes.copy(password = password))
  def realm(realm: String) = newInstance(attributes.copy(realm = realm))
  def realmKey(realmKey: String) = newInstance(attributes.copy(realmKey = realmKey))
  def authServerUrl(authServerUrl: Expression[String]) = newInstance(attributes.copy(authServerUrl = authServerUrl))

  override def build(next: ActorRef, protocols: Protocols): ActorRef = {
    actor(actorName("authorize"))(new AuthorizeAction(attributes, next))
  }
}

object AuthorizeAction {
  val logger = Logger.getLogger(classOf[AuthorizeAction])
}

class AuthorizeAction(
                       attributes: AuthorizeAttributes,
                       val next: ActorRef
                     ) extends Interruptable with ExitOnFailure with DataWriterClient {
  override def executeOrFail(session: Session): Validation[_] = {
    val facade = new MockHttpFacade()
    val deployment = KeycloakDeploymentBuilder.build(attributes.toAdapterConfig(session));
    facade.request.setURI(attributes.uri(session).get);
    facade.request.setCookies(attributes.cookies(session).get.map(c => (c.getName, c)).toMap.asJava)
    var nextSession = session
    val requestAuth: MockRequestAuthenticator = session(MockRequestAuthenticator.KEY).asOption[MockRequestAuthenticator] match {
      case Some(ra) => ra
      case None =>
        val tmp = new MockRequestAuthenticator(facade, deployment, new MockTokenStore, -1, session.userId)
        nextSession = session.set(MockRequestAuthenticator.KEY, tmp)
        tmp
    }

    Blocking(() => {
      AuthorizeAction.logger.debugf("%s: Authenticating %s%n", new SimpleDateFormat("HH:mm:ss,SSS").format(new Date()).asInstanceOf[Any], session("username").as[Any], Unit)
      Stopwatch(() => requestAuth.authenticate())
        .check(result => result == AuthOutcome.AUTHENTICATED, result => {
          AuthorizeAction.logger.warnf("%s: Failed auth %s%n", new SimpleDateFormat("HH:mm:ss,SSS").format(new Date()).asInstanceOf[Any], session("username").as[Any], Unit)
          result.toString
        })
        .recordAndContinue(AuthorizeAction.this, nextSession, attributes.requestName(session).get)
    })
  }
} 
Example 35
Source File: SessionServlet.scala    From jboss-wildfly-test   with Apache License 2.0 5 votes vote down vote up
package servlet

import java.text.SimpleDateFormat
import java.util.Date
import javax.servlet.annotation._
import javax.servlet.http._

@WebServlet(value = Array("/SessionServlet"))
class SessionServlet extends HttpServlet {

  def formatTime(timestamp: Long): String = {
    val sdf = new SimpleDateFormat("yyyy-MM-dd'T'hh:mm:ss.SSS")
    sdf.format(new Date(timestamp))
  }

  override def doGet(request: HttpServletRequest, response: HttpServletResponse) {
    response.setContentType("text/html")
    response.setCharacterEncoding("UTF-8")

    val out = response.getWriter
    out.println("<h3>Session Test Example</h3>")

    val session = request.getSession(true)
    out.println(
      s"""
        |Session Id: ${session.getId} <br/>
        |Created: ${formatTime(session.getCreationTime)} <br/>
        |Last Accessed: ${formatTime(session.getLastAccessedTime)} <br/>
      """.stripMargin)

    Option(request.getParameter("dataname")).foreach { dataName ⇒
      Option(request.getParameter("datavalue")).foreach { dataValue ⇒
        session.setAttribute(dataName, dataValue);
      }
    }

    import scala.collection.JavaConversions._
    val xs = session.getAttributeNames
    val sessionDataString = xs.map(name ⇒ s"$name = ${session.getAttribute(name)}").mkString("<br/>")
    out.println(
      s"""
        |<p>
        |The following data is in your session: <br/><br/>
        |$sessionDataString
        |</p>
        |
        |<p>
        |POST based form <br/>
        |<form action='${response.encodeURL("SessionServlet")}' method='post'>
        | Name of session attribute: <input type='text' size='20' name='dataname'/><br/>
        | Value of session attribute: <input type='text' size='20' name='datavalue'/><br/>
        | <input type='submit'/>
        |</form>
        |</p>
        |
        |<p>
        |GET based form <br/>
        |<form action='${response.encodeURL("SessionServlet")}' method='get'>
        | Name of session attribute: <input type='text' size='20' name='dataname'/><br/>
        | Value of session attribute: <input type='text' size='20' name='datavalue'/><br/>
        | <input type='submit'/>
        |</form>
        |</p>
        |
        |<p><a href='${response.encodeURL("SessionServlet?dataname=foo&datavalue=bar")}'>URL encoded</a>
      """.stripMargin)

    out.close()
  }

  override def doPost(req: HttpServletRequest, resp: HttpServletResponse): Unit =
    doGet(req, resp)
} 
Example 36
Source File: SimpleFormatter.scala    From scala-js-java-logging   with BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
package java.util.logging

import java.util.Date

class SimpleFormatter extends Formatter {
  // The default format is implementation specific
  private val defaultFmt = "[%4$s] %1s - %3$s - %5$s"

  def format(record: LogRecord): String = {
    // As per spec we check the property or use a default
    val fmt =
      System.getProperty("java.util.logging.SimpleFormatter.format", defaultFmt)

    String.format(fmt, new Date(record.getMillis),
        Option(record.getSourceClassName).getOrElse(""),
        Option(record.getLoggerName).getOrElse(""),
        record.getLevel,
        formatMessage(record),
        record.getThrown)
  }
} 
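The formatting step above boils down to wrapping the record's millisecond timestamp in a java.util.Date and handing it to a position-based format string. A small JVM-side sketch using only standard java.util.logging classes (the pattern below is illustrative, not the class's default):

import java.util.Date
import java.util.logging.{Level, LogRecord}

object SimpleFormatterSketch {
  def main(args: Array[String]): Unit = {
    val record = new LogRecord(Level.INFO, "hello from the formatter")
    record.setLoggerName("sketch")
    val line = String.format("[%4$s] %1$tF %1$tT - %3$s - %5$s",
      new Date(record.getMillis),
      Option(record.getSourceClassName).getOrElse(""),
      Option(record.getLoggerName).getOrElse(""),
      record.getLevel,
      record.getMessage)
    println(line)
  }
}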
Example 37
Source File: Total.scala    From akka_streams_tutorial   with MIT License 5 votes vote down vote up
package sample.stream_actor

import java.text.SimpleDateFormat
import java.util.{Date, TimeZone}

import akka.Done
import akka.actor.Actor
import sample.stream_actor.Total.Increment

object Total {
  case class Increment(value: Long, avg: Double, id: String)
}

class Total extends Actor {
  var total: Long = 0

  override def receive: Receive = {
    case Increment(value, avg, id) =>
      println(s"Received $value new measurements from turbine with id: $id -  Avg wind speed is: $avg")
      total = total + value

      val date = new Date()
      val df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
      df.setTimeZone(TimeZone.getTimeZone("Europe/Zurich"))

      println(s"${df.format(date) } - Current total of all measurements: $total")
      sender ! Done
  }
} 
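The java.util.Date-specific part here is the time-zone handling. A standalone sketch of the same formatting step, keeping the Europe/Zurich zone from the actor:

import java.text.SimpleDateFormat
import java.util.{Date, TimeZone}

object ZonedFormatSketch {
  def main(args: Array[String]): Unit = {
    val df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
    df.setTimeZone(TimeZone.getTimeZone("Europe/Zurich"))
    // Prints Zurich wall-clock time regardless of the JVM's default time zone.
    println(df.format(new Date()))
  }
}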
Example 38
Source File: RowUtilsTest.scala    From eel-sdk   with Apache License 2.0 5 votes vote down vote up
package io.eels

import java.util.Date

import io.eels.schema.{BooleanType, DateType, DoubleType, Field, IntType, StringType, StructType}
import org.scalatest.{Matchers, WordSpec}

class RowUtilsTest extends WordSpec with Matchers {
  "RowUtils.rowAlign" should {
    "rowAlign should reorder in line with target schema" in {
      val row = Row(StructType(Field("a"), Field("b"), Field("c")), "aaaa", "bbb", "ccc")
      val targetSchema = StructType(Field("c"), Field("b"))
      RowUtils.rowAlign(row, targetSchema) shouldBe Row(StructType(Field("c"), Field("b")), "ccc", "bbb")
    }
    "rowAlign should lookup missing data" in {
      val row = Row(StructType(Field("a"), Field("b"), Field("c")), "aaaa", "bbb", "ccc")
      val targetSchema = StructType(Field("c"), Field("d"))
      RowUtils.rowAlign(row, targetSchema, Map("d" -> "ddd")) shouldBe Row(StructType(Field("c"), Field("d")), "ccc", "ddd")
    }
    "rowAlign should throw an error if a field is missing" in {
      val row = Row(StructType(Field("a"), Field("b"), Field("c")), "aaaa", "bbb", "ccc")
      val targetSchema = StructType(Field("c"), Field("d"))
      intercept[RuntimeException] {
        RowUtils.rowAlign(row, targetSchema)
      }
    }
  }

  "RowUtils.coerce" should {
    "coerce values to match types" in {
      val schema = StructType(Field("a", StringType), Field("b", DoubleType), Field("c", BooleanType), Field("d", DateType), Field("e", IntType.Signed))
      val row = Row(schema, Vector(1, "1.4", "true", "1123123244493", "1"))
      RowUtils.coerce(row) shouldBe Row(schema, "1", 1.4D, true, new Date(1123123244493L), 1)
    }
  }
} 
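The date case in the coercion test presumably comes down to parsing the epoch-millisecond string and wrapping it in a java.util.Date. A standalone sketch of that conversion (eel's actual coercion logic may differ):

import java.util.Date

object CoerceDateSketch {
  def main(args: Array[String]): Unit = {
    val raw = "1123123244493"          // epoch milliseconds as a string, as in the test
    val coerced = new Date(raw.toLong) // equivalent to the expected new Date(1123123244493L)
    println(coerced)
  }
}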
Example 39
Source File: SftpStore.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore
package sftp

import java.util.Date

import com.jcraft.jsch._
import cats.instances.option._

import scala.util.Try
import java.io.OutputStream

import cats.Traverse
import cats.effect.{Blocker, ConcurrentEffect, ContextShift, IO, Resource}
import cats.effect.concurrent.{MVar, Semaphore}
import fs2.concurrent.Queue

final class SftpStore[F[_]](
  absRoot: String,
  session: Session,
  blocker: Blocker,
  mVar: MVar[F, ChannelSftp],
  semaphore: Option[Semaphore[F]],
  connectTimeout: Int
)(implicit F: ConcurrentEffect[F], CS: ContextShift[F]) extends Store[F] {
  import implicits._

  import Path.SEP

  private val openChannel: F[ChannelSftp] = {
    val openF = blocker.delay{
      val ch = session.openChannel("sftp").asInstanceOf[ChannelSftp]
      ch.connect(connectTimeout)
      ch
    }
    semaphore.fold(openF){s =>
      F.ifM(s.tryAcquire)(openF, getChannel)
    }
  }

  private val getChannel = F.flatMap(mVar.tryTake) {
    case Some(channel) => F.pure(channel)
    case None => openChannel
  }

  private def channelResource: Resource[F, ChannelSftp] = Resource.make{
    getChannel
  }{
    case ch if ch.isClosed => F.unit
    case ch => F.ifM(mVar.tryPut(ch))(F.unit, SftpStore.closeChannel(semaphore, blocker)(ch))
  }

  // (the remaining Store overrides are elided in the original snippet)
}

object SftpStore {
  def apply[F[_]](
    absRoot: String,
    fa: F[Session],
    blocker: Blocker,
    maxChannels: Option[Long] = None,
    connectTimeout: Int = 10000
  )(implicit F: ConcurrentEffect[F], CS: ContextShift[F]): fs2.Stream[F, SftpStore[F]] =
    if (maxChannels.exists(_ < 1)) {
      fs2.Stream.raiseError[F](new IllegalArgumentException(s"maxChannels must be >= 1"))
    } else {
      for {
        session <- fs2.Stream.bracket(fa)(session => F.delay(session.disconnect()))
        semaphore <- fs2.Stream.eval(Traverse[Option].sequence(maxChannels.map(Semaphore.apply[F])))
        mVar <- fs2.Stream.bracket(MVar.empty[F, ChannelSftp])(mVar => F.flatMap(mVar.tryTake)(_.fold(F.unit)(closeChannel[F](semaphore, blocker))))
      } yield new SftpStore[F](absRoot, session, blocker, mVar, semaphore, connectTimeout)
    }

  private def closeChannel[F[_]](semaphore: Option[Semaphore[F]], blocker: Blocker)(ch: ChannelSftp)(implicit F: ConcurrentEffect[F], CS: ContextShift[F]): F[Unit] =
    F.productR(semaphore.fold(F.unit)(_.release))(blocker.delay(ch.disconnect()))
} 
Example 40
Source File: GcsStore.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore.gcs

import java.nio.channels.Channels
import java.time.Instant
import java.util.Date

import blobstore.{Path, Store}
import cats.effect.{Blocker, ContextShift, Sync}
import com.google.api.gax.paging.Page
import com.google.cloud.storage.{Acl, Blob, BlobId, BlobInfo, Storage}
import com.google.cloud.storage.Storage.{BlobListOption, CopyRequest}
import fs2.{Chunk, Pipe, Stream}

import scala.jdk.CollectionConverters._

final class GcsStore[F[_]](storage: Storage, blocker: Blocker, acls: List[Acl] = Nil)(implicit F: Sync[F], CS: ContextShift[F]) extends Store[F] {

  private def _chunk(pg: Page[Blob]): Chunk[Path] = {
    val (dirs, files) = pg.getValues.asScala.toSeq.partition(_.isDirectory)
    val dirPaths = Chunk.seq(dirs.map(b => Path(root = b.getBucket, key = b.getName.stripSuffix("/"), size = None, isDir = true, lastModified = None)))
    val filePaths = Chunk.seq(files.map{b =>
      val size = Option(b.getSize: java.lang.Long).map(_.toLong) // Prevent throwing NPE (see https://github.com/scala/bug/issues/9634)
      val lastModified = Option(b.getUpdateTime: java.lang.Long).map(millis => Date.from(Instant.ofEpochMilli(millis))) // Prevent throwing NPE (see https://github.com/scala/bug/issues/9634)
      Path(b.getBucket, key = b.getName, size = size, isDir = false, lastModified = lastModified)
    })
    Chunk.concat(List(dirPaths, filePaths))
  }

  def list(path: Path): fs2.Stream[F, Path] = {
    Stream.unfoldChunkEval[F, () => Option[Page[Blob]], Path]{
      () => Some(storage.list(path.root, BlobListOption.currentDirectory(), BlobListOption.prefix(path.key)))
    }{getPage =>
      blocker.delay{
        getPage().map{pg =>
          if (pg.hasNextPage){
            (_chunk(pg), () => Some(pg.getNextPage))
          } else {
            (_chunk(pg), () => None)
          }
        }
      }
    }
  }

  def get(path: Path, chunkSize: Int): fs2.Stream[F, Byte] = {
    val is = blocker.delay(Channels.newInputStream(storage.get(path.root, path.key).reader()))
    fs2.io.readInputStream(is, chunkSize, blocker, closeAfterUse = true)
  }

  def put(path: Path): Pipe[F, Byte, Unit] = {
    val fos = Sync[F].delay{
      val builder = {
        val b = BlobInfo.newBuilder(path.root, path.key)
        if (acls.nonEmpty) b.setAcl(acls.asJava) else b
      }
      val blobInfo = builder.build()
      val writer = storage.writer(blobInfo)
      Channels.newOutputStream(writer)
    }
    fs2.io.writeOutputStream(fos, blocker, closeAfterUse = true)
  }

  def move(src: Path, dst: Path): F[Unit] = F.productR(copy(src, dst))(remove(src))

  def copy(src: Path, dst: Path): F[Unit] = {
    val req = CopyRequest.newBuilder().setSource(src.root, src.key).setTarget(BlobId.of(dst.root, dst.key)).build()
    F.void(blocker.delay(storage.copy(req).getResult))
  }

  def remove(path: Path): F[Unit] =
    F.void(blocker.delay(storage.delete(path.root, path.key)))
}


object GcsStore{
  def apply[F[_]](
    storage: Storage,
    blocker: Blocker,
    acls: List[Acl]
  )(implicit F: Sync[F], CS: ContextShift[F]): GcsStore[F] = new GcsStore(storage, blocker, acls)
} 
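A standalone sketch of the null-safe conversion used in _chunk above: box the possibly-null java.lang.Long through Option before turning epoch millis into a java.util.Date:

import java.time.Instant
import java.util.Date

object NullSafeDateSketch {
  def toDate(updateTime: java.lang.Long): Option[Date] =
    Option(updateTime).map(millis => Date.from(Instant.ofEpochMilli(millis)))

  def main(args: Array[String]): Unit = {
    println(toDate(1514709779000L)) // Some(...) with the converted Date
    println(toDate(null))           // None, no NullPointerException
  }
}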
Example 41
Source File: FileStore.scala    From fs2-blobstore   with Apache License 2.0 5 votes vote down vote up
package blobstore
package fs

import java.nio.file.{Files, Paths, Path => NioPath}
import java.util.Date

import scala.jdk.CollectionConverters._
import cats.implicits._
import cats.effect.{Blocker, ContextShift, Sync}
import fs2.{Pipe, Stream}

final class FileStore[F[_]](fsroot: NioPath, blocker: Blocker)(implicit F: Sync[F], CS: ContextShift[F]) extends Store[F] {
  val absRoot: String = fsroot.toAbsolutePath.normalize.toString

  override def list(path: Path): fs2.Stream[F, Path] = {
    val isDir = Stream.eval(F.delay(Files.isDirectory(path)))
    val isFile = Stream.eval(F.delay(Files.exists(path)))

    val files = Stream.eval(F.delay(Files.list(path)))
      .flatMap(x => Stream.fromIterator(x.iterator.asScala))
      .evalMap(x => F.delay(
        Path(x.toAbsolutePath.toString.replaceFirst(absRoot, "")).copy(
          size = Option(Files.size(x)),
          isDir = Files.isDirectory(x),
          lastModified = Option(new Date(Files.getLastModifiedTime(x).toMillis)) // use x (the listed entry), not the parent path
        )
      ))

    val file = fs2.Stream.eval {
      F.delay {
        path.copy(
          size = Option(Files.size(path)),
          lastModified = Option(new Date(Files.getLastModifiedTime(path).toMillis))
        )
      }
    }

    isDir.ifM(files, isFile.ifM(file, Stream.empty.covaryAll[F, Path]))
  }

  override def get(path: Path, chunkSize: Int): fs2.Stream[F, Byte] = fs2.io.file.readAll[F](path, blocker, chunkSize)

  override def put(path: Path): Pipe[F, Byte, Unit] = { in =>
    val mkdir = Stream.eval(F.delay(Files.createDirectories(_toNioPath(path).getParent)).as(true))
    mkdir.ifM(
      fs2.io.file.writeAll(path, blocker).apply(in),
      Stream.raiseError[F](new Exception(s"failed to create dir: $path"))
    )
  }

  override def move(src: Path, dst: Path): F[Unit] = F.delay {
    Files.createDirectories(_toNioPath(dst).getParent)
    Files.move(src, dst)
  }.void

  override def copy(src: Path, dst: Path): F[Unit] = {
    F.delay {
      Files.createDirectories(_toNioPath(dst).getParent)
      Files.copy(src, dst)
    }.void
  }

  override def remove(path: Path): F[Unit] = F.delay({
    Files.deleteIfExists(path)
    ()
  })

  implicit private def _toNioPath(path: Path): NioPath =
    Paths.get(absRoot, path.root, path.key)

}

object FileStore{
  def apply[F[_]](fsroot: NioPath, blocker: Blocker)(implicit F: Sync[F], CS: ContextShift[F]): FileStore[F] = new FileStore(fsroot, blocker)
} 
Example 42
Source File: Worker.scala    From EncryCore   with GNU General Public License v3.0 5 votes vote down vote up
package encry.local.miner

import java.util.Date

import akka.actor.{Actor, ActorRef}
import encry.EncryApp._

import scala.concurrent.duration._
import encry.consensus.{CandidateBlock, ConsensusSchemeReaders}
import encry.local.miner.Miner.MinedBlock
import encry.local.miner.Worker.{MineBlock, NextChallenge}
import java.text.SimpleDateFormat

import com.typesafe.scalalogging.StrictLogging
import org.encryfoundation.common.utils.constants.TestNetConstants

class Worker(myIdx: Int, numberOfWorkers: Int, miner: ActorRef) extends Actor with StrictLogging {

  val sdf: SimpleDateFormat = new SimpleDateFormat("HH:mm:ss")
  var challengeStartTime: Date = new Date(System.currentTimeMillis())

  val initialNonce: Long = Long.MaxValue / numberOfWorkers * myIdx

  override def preRestart(reason: Throwable, message: Option[Any]): Unit =
    logger.warn(s"Worker $myIdx is restarting because of: $reason")

  override def receive: Receive = {
    case MineBlock(candidate: CandidateBlock, nonce: Long) =>
      logger.info(s"Trying nonce: $nonce. Start nonce is: $initialNonce. " +
        s"Iter qty: ${nonce - initialNonce + 1} on worker: $myIdx with diff: ${candidate.difficulty}")
      ConsensusSchemeReaders
        .consensusScheme.verifyCandidate(candidate, nonce)
        .fold(
          e => {
            self ! MineBlock(candidate, nonce + 1)
            logger.info(s"Mining failed cause: $e")
          },
          block => {
            logger.info(s"New block is found: (${block.header.height}, ${block.header.encodedId}, ${block.payload.txs.size}) " +
              s"on worker $self at ${sdf.format(new Date(System.currentTimeMillis()))}. Iter qty: ${nonce - initialNonce + 1}")
            miner ! MinedBlock(block, myIdx)
          })
    case NextChallenge(candidate: CandidateBlock) =>
      challengeStartTime = new Date(System.currentTimeMillis())
      logger.info(s"Start next challenge on worker: $myIdx at height " +
        s"${candidate.parentOpt.map(_.height + 1).getOrElse(TestNetConstants.PreGenesisHeight.toString)} at ${sdf.format(challengeStartTime)}")
      self ! MineBlock(candidate, Long.MaxValue / numberOfWorkers * myIdx)
  }

}

object Worker {

  case class NextChallenge(candidateBlock: CandidateBlock)

  case class MineBlock(candidateBlock: CandidateBlock, nonce: Long)

} 
Example 43
Source File: PartitionDateFormatterTest.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package com.crobox.clickhouse.partitioning

import java.util.Date

import org.joda.time.DateTime
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

class PartitionDateFormatterTest extends AnyFlatSpecLike with Matchers {

  val expectedResult = "2017-12-31"
  val inputTimestamp = 1514709779000L

  it should "parse timestamp to partition date" in {
    PartitionDateFormatter.dateFormat(inputTimestamp) should be(expectedResult)
  }
  it should "parse joda date time to partition date" in {
    PartitionDateFormatter.dateFormat(new DateTime(inputTimestamp)) should be(expectedResult)
  }
  it should "parse java date to partition date" in {
    PartitionDateFormatter.dateFormat(new Date(inputTimestamp)) should be(expectedResult)
  }
} 
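A sketch of a formatter that would satisfy the three assertions above, normalising every input to epoch milliseconds before formatting; the library's real PartitionDateFormatter implementation may differ:

import java.text.SimpleDateFormat
import java.util.Date
import org.joda.time.DateTime

object PartitionDateFormatSketch {
  // Note: SimpleDateFormat is not thread-safe; a real implementation would guard or avoid it.
  private def fmt(millis: Long): String =
    new SimpleDateFormat("yyyy-MM-dd").format(new Date(millis))

  def dateFormat(timestamp: Long): String = fmt(timestamp)
  def dateFormat(date: Date): String = fmt(date.getTime)
  def dateFormat(date: DateTime): String = fmt(date.getMillis)

  def main(args: Array[String]): Unit =
    println(dateFormat(1514709779000L)) // 2017-12-31 in the test's time zone
}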
Example 44
Source File: SearchManagementRepository.scala    From smui   with Apache License 2.0 5 votes vote down vote up
package models

import java.io.FileInputStream
import java.time.LocalDateTime
import java.util.UUID
import java.util.Date

import anorm.SqlParser.get
import javax.inject.Inject
import anorm._
import models.FeatureToggleModel.FeatureToggleService
import models.SearchInput.ID
import play.api.db.DBApi

@javax.inject.Singleton
class SearchManagementRepository @Inject()(dbapi: DBApi, toggleService: FeatureToggleService)(implicit ec: DatabaseExecutionContext) {

  private val db = dbapi.database("default")

  // On startup, always sync predefined tags with the DB
  syncPredefinedTagsWithDB()

  private def syncPredefinedTagsWithDB(): Unit = {
    db.withTransaction { implicit connection =>
      if (toggleService.isRuleTaggingActive) {
        for (fileName <- toggleService.predefinedTagsFileName) {
          val tags = PredefinedTag.fromStream(new FileInputStream(fileName))
          PredefinedTag.updateInDB(tags)
        }
      }
    }
  }

  
  def addNewSearchInput(solrIndexId: SolrIndexId, searchInputTerm: String, tags: Seq[InputTagId]): SearchInputId = db.withConnection { implicit connection =>
    val id = SearchInput.insert(solrIndexId, searchInputTerm).id
    if (tags.nonEmpty) {
      TagInputAssociation.updateTagsForSearchInput(id, tags)
    }
    id
  }

  def getDetailedSearchInput(searchInputId: SearchInputId): Option[SearchInputWithRules] = db.withConnection { implicit connection =>
    SearchInputWithRules.loadById(searchInputId)
  }

  def updateSearchInput(searchInput: SearchInputWithRules): Unit = db.withTransaction { implicit connection =>
    SearchInputWithRules.update(searchInput)
  }

  def deleteSearchInput(searchInputId: String): Int = db.withTransaction { implicit connection =>
    SearchInputWithRules.delete(SearchInputId(searchInputId))
  }

  def listAllSuggestedSolrFields(solrIndexId: String): List[SuggestedSolrField] = db.withConnection { implicit connection =>
    SuggestedSolrField.listAll(SolrIndexId(solrIndexId))
  }

  def addNewSuggestedSolrField(solrIndexId: SolrIndexId, suggestedSolrFieldName: String): SuggestedSolrField = db.withConnection { implicit connection =>
    SuggestedSolrField.insert(solrIndexId, suggestedSolrFieldName)
  }

  def addNewDeploymentLogOk(solrIndexId: String, targetPlatform: String): Boolean = db.withConnection { implicit connection =>
    SQL("insert into deployment_log(id, solr_index_id, target_platform, last_update, result) values ({id}, {solr_index_id}, {target_platform}, {last_update}, {result})")
      .on(
        'id -> UUID.randomUUID().toString,
        'solr_index_id -> solrIndexId,
        'target_platform -> targetPlatform,
        'last_update -> new Date(),
        'result -> 0
      )
      .execute()
  }

  case class DeploymentLogDetail(id: String, lastUpdate: LocalDateTime, result: Int)

  val sqlParserDeploymentLogDetail: RowParser[DeploymentLogDetail] = {
    get[String](s"deployment_log.id") ~
      get[LocalDateTime](s"deployment_log.last_update") ~
      get[Int](s"deployment_log.result") map { case id ~ lastUpdate ~ result =>
      DeploymentLogDetail(id, lastUpdate, result)
    }
  }

  def lastDeploymentLogDetail(solrIndexId: String, targetPlatform: String): Option[DeploymentLogDetail] = db.withConnection {
    implicit connection => {
      SQL"select * from deployment_log where solr_index_id = $solrIndexId and target_platform = $targetPlatform order by last_update desc".as(sqlParserDeploymentLogDetail.*).headOption
    }
  }

} 
Example 45
Source File: DefaultUsers.scala    From meteorite-core   with Apache License 2.0 5 votes vote down vote up
package bi.meteorite.core.security.hibernate

import bi.meteorite.core.api.objects.Event
import bi.meteorite.core.api.objects.MeteoriteUser
import bi.meteorite.core.api.objects.MeteoriteRole
import bi.meteorite.core.api.persistence.EventService
import bi.meteorite.core.api.persistence.UserService
import bi.meteorite.objects.EventImpl
import bi.meteorite.objects.RoleImpl
import bi.meteorite.objects.UserImpl
import java.util.Date
import java.util.UUID
import javax.annotation.PostConstruct
import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer


class DefaultUsers {
  private var userService: UserService = null
  private var eventService: EventService = null

  @PostConstruct def insertUsers() {
    if (eventService.getEventByEventName("Start Adding Users") == null) {
      val uuid: String = UUID.randomUUID.toString
      val e: Event = eventService.addEvent(new EventImpl(uuid, this.getClass.getName, "Start Adding users", "Adding users to user list", new Date))
      var u: MeteoriteUser = new UserImpl
      u.setUsername("admin")
      u.setPassword("admin")
      val r: MeteoriteRole = new RoleImpl
      r.setUserId(u)
      r.setRole("ROLE_ADMIN")
      val r2: MeteoriteRole = new RoleImpl
      r2.setUserId(u)
      r2.setRole("ROLE_USER")
      val l = ListBuffer[MeteoriteRole](r, r2)
      u.setRoles(l.asJava)
      u = userService.addUser(u)

      u = new UserImpl
      u.setUsername("smith")
      u.setPassword("smith")
      val s2 = List[MeteoriteRole](new RoleImpl("ROLE_USER", u.asInstanceOf[UserImpl]))
      u.setRoles(s2.asJava)
      userService.addUser(u)
      e.setEndDate(new Date)
      e.setDuration(e.getEndDate.getTime - e.getStartDate.getTime)
      eventService.updateEvent(e)
    }
  }

  def setUserService(userService: UserService) {
    this.userService = userService
  }

  def setEventService(eventService: EventService) {
    this.eventService = eventService
  }
} 
Example 46
Source File: IOUtils.scala    From watr-works   with Apache License 2.0 5 votes vote down vote up
package edu.umass.cs.iesl.watr
package utils

object PathUtils {

  import ammonite.{ops => fs}

  import java.nio.{file => nio}

  def appendTimestamp(path: String): String = {
    import java.text.SimpleDateFormat
    import java.util.Date
    val dateStamp = new SimpleDateFormat("yyyyMMddhhmmss").format(new Date())
    s"$path-$dateStamp"
  }

  def nioToAmm(nioPath: nio.Path): fs.Path = {
    fs.FilePath(nioPath) match {
      case p: fs.Path =>  p
      case p: fs.RelPath => fs.pwd / p
      case _ => ???
    }
  }

  def strToAmmPath(str: String): fs.Path = {
    fs.FilePath(str) match {
      case p: fs.Path =>  p
      case p: fs.RelPath => fs.pwd / p
      case _ => ???
    }
  }

  implicit class RicherPathUtils_String(val self: String) extends AnyVal {

    def toPath(): fs.Path = {
      strToAmmPath(self)
    }
  }

  implicit class RicherPathUtils_NioPath(val self: nio.Path) extends AnyVal {

    def toFsPath(): fs.Path = {
      nioToAmm(self)
    }
  }

} 
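A quick standalone sketch of the appendTimestamp pattern above (note that lowercase hh is the 12-hour clock; HH would give 24-hour timestamps); the path and output are illustrative:

import java.text.SimpleDateFormat
import java.util.Date

object AppendTimestampSketch {
  // Mirrors PathUtils.appendTimestamp above.
  def appendTimestamp(path: String): String = {
    val dateStamp = new SimpleDateFormat("yyyyMMddhhmmss").format(new Date())
    s"$path-$dateStamp"
  }

  def main(args: Array[String]): Unit =
    println(appendTimestamp("corpus-export")) // e.g. corpus-export-20240101093045
}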
Example 47
Source File: GenericMainClass.scala    From darwin   with Apache License 2.0 5 votes vote down vote up
package it.agilelab.darwin.app.spark

import java.text.SimpleDateFormat
import java.util.Date

import com.typesafe.config.{Config, ConfigFactory}
import org.apache.hadoop.fs.FileSystem
import org.apache.spark.sql.SparkSession
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConverters._

trait GenericMainClass {
  self: SparkManager =>

  val genericMainClassLogger: Logger = LoggerFactory.getLogger("SparkManager")

  private def makeFileSystem(session: SparkSession): FileSystem = {
    if (session.sparkContext.isLocal) {
      FileSystem.getLocal(session.sparkContext.hadoopConfiguration)
    }
    else {
      FileSystem.get(session.sparkContext.hadoopConfiguration)
    }
  }


  
  // scalastyle:off
  private def getGlobalConfig: Config = {
    genericMainClassLogger.debug("system environment vars")
    for ((k, v) <- System.getenv().asScala.toSeq.sortBy(_._1)) genericMainClassLogger.debug(s"$k -> $v")

    genericMainClassLogger.debug("system properties")
    for ((k, v) <- System.getProperties.asScala.toSeq.sortBy(_._1)) genericMainClassLogger.debug(s"$k -> $v")

    ConfigFactory.load()
  }

  // scalastyle:on

} 
Example 48
Source File: SQLAppStatusStore.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.sql.execution.ui

import java.lang.{Long => JLong}
import java.util.Date

import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.annotation.JsonDeserialize

import org.apache.spark.JobExecutionStatus
import org.apache.spark.status.KVUtils.KVIndexParam
import org.apache.spark.util.kvstore.{KVIndex, KVStore}


class SparkPlanGraphNodeWrapper(
    val node: SparkPlanGraphNode,
    val cluster: SparkPlanGraphClusterWrapper) {

  def toSparkPlanGraphNode(): SparkPlanGraphNode = {
    assert(node == null ^ cluster == null, "One and only one of node or cluster must be set.")
    if (node != null) node else cluster.toSparkPlanGraphCluster()
  }

}

case class SQLPlanMetric(
    name: String,
    accumulatorId: Long,
    metricType: String) 
Example 49
Source File: ExecutorNumListener.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.monitor

import java.text.SimpleDateFormat
import java.util
import java.util.Date
import java.util.concurrent.atomic.AtomicBoolean

import com.fasterxml.jackson.annotation.JsonIgnore

import org.apache.spark.SparkContext
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.{
  SparkListener,
  SparkListenerExecutorAdded,
  SparkListenerExecutorRemoved
}
import org.apache.spark.util.kvstore.KVIndex

class ExecutorNumListener extends SparkListener with Logging {

  lazy val kvstore = SparkContext.getActive.get.statusStore.store
  var initialized: AtomicBoolean = new AtomicBoolean(false)
  var lastPointTime: Long = new Date().getTime
  var recentEventTime: Long = new Date().getTime
  private val liveExecutors = new util.HashSet[String]()

  def initialize(): Unit = {
    SparkContext.getActive.map(_.ui).flatten.foreach {
      case ui =>
        ui.attachTab(new ExecutorNumTab(ui))
        ui.addStaticHandler("static", "/static/special")
    }
  }

  def maybeAddPoint(time: Long): Unit = {
    if (!initialized.get) {
      initialize()
      initialized.compareAndSet(false, true)
    }
    if (time - lastPointTime > 20 * 1000) {
      addPoint(recentEventTime)
      addPoint(time)
      lastPointTime = time
    }
    recentEventTime = time
  }
  def addPoint(time: Long): Unit = {
    val executorNum = liveExecutors.size
    kvstore.write(new ExecutorNumWrapper(new ExecutorNum(
      s"own ${executorNum} executors at ${new SimpleDateFormat("HH:mm:ss").format(new Date(time))}",
      IndexedSeq(time, executorNum))))
  }

  override def onExecutorAdded(event: SparkListenerExecutorAdded): Unit = {
    liveExecutors.add(event.executorId)
    maybeAddPoint(event.time)
  }

  override def onExecutorRemoved(event: SparkListenerExecutorRemoved): Unit = {
    liveExecutors.remove(event.executorId)
    maybeAddPoint(event.time)
  }

}

private[spark] class ExecutorNumWrapper(val point: ExecutorNum) {
  @JsonIgnore @KVIndex
  def id: Long = point.value(0)
}

private[spark] class ExecutorNum(val name: String, val value: IndexedSeq[Long]) 
Example 50
Source File: ApplicationMonitor.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.monitor.application

import java.sql.{Connection, Timestamp}
import java.text.SimpleDateFormat
import java.util.Date
import java.util.concurrent.TimeUnit

import scala.concurrent.duration.Duration

import org.apache.spark.alarm.AlertMessage
import org.apache.spark.alarm.AlertType._
import org.apache.spark.monitor.Monitor
import org.apache.spark.monitor.MonitorItem.MonitorItem

abstract class ApplicationMonitor extends Monitor {
  override val alertType = Seq(Application)
}

class ApplicationInfo(
    title: MonitorItem,
    appName: String,
    appId: String,
    md5: String,
    startTime: Date,
    duration: Long,
    appUiUrl: String,
    historyUrl: String,
    eventLogDir: String,
    minExecutor: Int,
    maxExecutor: Int,
    executorCore: Int,
    executorMemoryMB: Long,
    executorAccu: Double,
    user: String)
  extends AlertMessage(title) {
  override def toCsv(): String = {
    s"${user},${appId}," +
      s"${new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(startTime)}," +
      s"${Duration(duration, TimeUnit.MILLISECONDS).toSeconds}," +
      s"${executorMemoryMB},${executorCore},${executorAccu.formatted("%.2f")},${appName}"
  }
  // scalastyle:off
  override def toHtml(): String = {
    val html = <h1>Job finished!</h1>
        <h2>Job information</h2>
        <ul>
          <li>Job name: {appName}</li>
          <li>Job ID: {appId}</li>
          <li>Start time: {new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(startTime)}</li>
          <li>Job duration: {Duration(duration, TimeUnit.MILLISECONDS).toSeconds} s</li>
        </ul>
        <h2>Resource usage</h2>
        <ul>
          <li>Executor count: {minExecutor}~{maxExecutor}</li>
          <li>Executor memory: {executorMemoryMB} MB</li>
          <li>Executor cores: {executorCore}</li>
          <li>Cumulative executor usage: {executorAccu.formatted("%.2f")} executor*min</li>
        </ul>
        <h2>Debug information</h2>
        <ul>
          <li>Review link 1: <a href={appUiUrl.split(",").head}>{appUiUrl.split(",").head}</a></li>
          <li>Review link 2: <a href={historyUrl}>{historyUrl}</a></li>
          <li>Event log directory: {eventLogDir}</li>
        </ul>
    html.mkString
  }

  override def toJdbc(conn: Connection, appId: String): Unit = {
    val query = "INSERT INTO `xsql_monitor`.`spark_history`(" +
      "`user`, `md5`, `appId`, `startTime`, `duration`, " +
      "`yarnURL`, `sparkHistoryURL`, `eventLogDir`, `coresPerExecutor`, `memoryPerExecutorMB`," +
      " `executorAcc`, `appName`, `minExecutors`, `maxExecutors`)" +
      " SELECT ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? FROM DUAL" +
      " WHERE NOT EXISTS (SELECT * FROM `xsql_monitor`.`spark_history` WHERE `appId` = ?);"

    val preparedStmt = conn.prepareStatement(query)
    preparedStmt.setString(1, user)
    preparedStmt.setString(2, md5)
    preparedStmt.setString(3, appId)
    preparedStmt.setTimestamp(4, new Timestamp(startTime.getTime))
    preparedStmt.setLong(5, Duration(duration, TimeUnit.MILLISECONDS).toSeconds)
    preparedStmt.setString(6, appUiUrl)
    preparedStmt.setString(7, historyUrl)
    preparedStmt.setString(8, eventLogDir)
    preparedStmt.setInt(9, executorCore)
    preparedStmt.setLong(10, executorMemoryMB)
    preparedStmt.setDouble(11, executorAccu)
    preparedStmt.setString(12, appName)
    preparedStmt.setInt(13, minExecutor)
    preparedStmt.setInt(14, maxExecutor)
    preparedStmt.setString(15, appId)
    preparedStmt.execute
  }
} 
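The JDBC write above depends on converting java.util.Date into java.sql.Timestamp for the startTime column. A minimal standalone sketch of that conversion:

import java.sql.Timestamp
import java.util.Date

object DateToTimestampSketch {
  def main(args: Array[String]): Unit = {
    val startTime = new Date()
    val ts = new Timestamp(startTime.getTime) // preserves millisecond precision
    println(ts)
  }
}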
Example 51
Source File: SQLMonitor.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.monitor.sql

import java.text.SimpleDateFormat
import java.util.Date
import java.util.concurrent.TimeUnit

import scala.concurrent.duration.Duration

import org.apache.spark.alarm.AlertMessage
import org.apache.spark.alarm.AlertType._
import org.apache.spark.monitor.Monitor
import org.apache.spark.monitor.MonitorItem.MonitorItem

abstract class SQLMonitor extends Monitor {
  override val alertType = Seq(SQL)

}

class SQLInfo(
    title: MonitorItem,
    sqlId: String,
    aeFlag: Boolean,
    appId: String,
    executionId: Long,
    submissionTime: Date,
    duration: Long)
  extends AlertMessage(title) {
  override def toCsv(): String = {
    s"${sqlId},${aeFlag},${appId},${executionId}," +
      s"${new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(submissionTime)}," +
      s"${Duration(duration, TimeUnit.MILLISECONDS).toSeconds}"
  }

} 
Example 52
Source File: ExecutorNumMonitor.scala    From XSQL   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.monitor.executor

import java.io.File
import java.util.Date

import scala.xml._

import org.apache.spark.alarm.{AlertMessage, EmailAlarm, HtmlMessage}
import org.apache.spark.monitor.{Monitor, MonitorItem}
import org.apache.spark.monitor.MonitorItem.MonitorItem
import org.apache.spark.painter.TimeSeriesChartPainter
import org.apache.spark.scheduler._
import org.apache.spark.status.ExecutorSummaryWrapper

class ExecutorNumMonitor extends ExecutorMonitor {
  override val item: MonitorItem = MonitorItem.EXECUTOR_NUM_NOTIFIER
  lazy val dataPath = s"/tmp/${item}-${conf.get("spark.app.id")}.csv"
  lazy val picturePath = s"/tmp/${item}-${conf.get("spark.app.id")}.jpg"
  lazy val eventMinInterval =
    conf.getTimeAsMs(s"${Monitor.PREFIX}.${item.toString.toLowerCase}.granularity", "60s")
  var lastPointTime: Long = new Date().getTime
  var recentEventTime: Long = new Date().getTime

  lazy private val painter = new TimeSeriesChartPainter(dataPath, picturePath)

  def executorNum(): Long = {
    kvStore.count(classOf[ExecutorSummaryWrapper], "active", true)
  }

  def addPoint(executorNum: Long, time: Long): Unit = {
    painter.addPoint(executorNum, time) // use the time passed by the caller, not recentEventTime
  }
  // scalastyle:off
  override def watchOut(event: SparkListenerEvent): Option[AlertMessage] = {
    event match {
      case env: SparkListenerExecutorAdded =>
        // try to coalesce executor-number changes within 60s into one point, so that we can keep the graph clean and readable
        if (env.time - lastPointTime > eventMinInterval) {
          addPoint(executorNum, recentEventTime)
          addPoint(executorNum, env.time)
          lastPointTime = env.time
        }
        recentEventTime = env.time
        Option.empty
      case env: SparkListenerExecutorRemoved =>
        if (env.time - lastPointTime > eventMinInterval) {
          addPoint(executorNum, recentEventTime)
          addPoint(executorNum, env.time)
          lastPointTime = env.time
        }
        recentEventTime = env.time
        Option.empty
      case e: SparkListenerApplicationEnd =>
        addPoint(executorNum, recentEventTime)
        addPoint(executorNum, new Date().getTime)
        painter.paint(600, 400, "executor num curve", "datetime", "executor num")
        if (EmailAlarm.get().isDefined) {
          val pic = EmailAlarm.get().get.embed(new File(picturePath))
          val a = <h2>Dynamic allocation status:</h2>
            <img src={"cid:"+pic}></img>
            <br/>
          Option(new HtmlMessage(title = item, content = a.mkString))
        } else {
          Option.empty
        }
    }
  }
  // scalastyle:on
} 
Example 53
Source File: WindowService.scala    From ledger-manager-chrome   with MIT License 5 votes vote down vote up
package co.ledger.manager.web.services

import java.util.Date

import biz.enef.angulate.Module.RichModule
import biz.enef.angulate.Service
import co.ledger.manager.web.controllers.WindowController
import co.ledger.manager.web.core.event.JsEventEmitter
import co.ledger.manager.web.components.SnackBar.SnackBarInstance

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.scalajs.js.timers


class WindowService extends Service {

  def enableUserInterface(): Unit = _userInterfaceEnableListener.foreach(_(true))
  def disableUserInterface(): Unit = _userInterfaceEnableListener.foreach(_(false))

  def onUserInterfaceEnableChanged(handler: (Boolean) => Unit): Unit = {
    _userInterfaceEnableListener = Option(handler)
  }

  private var _userInterfaceEnableListener: Option[(Boolean) => Unit] = None

  // Navigation bar features
  def showNavigationBar(): Unit = {
    if (!_navigationIsVisible) {
      _navigationIsVisible = true
      _navigationBarVisibilityListener.foreach(_(_navigationIsVisible))
    }
  }

  def hideNavigationBar(): Unit = {
    if (_navigationIsVisible) {
      _navigationIsVisible = false
      _navigationBarVisibilityListener.foreach(_(_navigationIsVisible))
    }
  }

  def onNavigationBarVisibilityChanged(handler: (Boolean) => Unit) = {
    _navigationBarVisibilityListener = Option(handler)
  }

  def notifyRefresh(): Unit = {
    if (_refreshHandler.isDefined && !isRefreshing) {
      val start = new Date().getTime
      _refreshing = true
      eventEmitter.emit(StartRefresh())
      _refreshHandler.get() onComplete {
        case all =>
          import timers._
          val now = new Date().getTime
          setTimeout((now - start) % 1000L + 1000L) {
            _refreshing = false
            eventEmitter.emit(StopRefresh())
          }
      }
    }
  }

  def isRefreshing = _refreshing
  private var _refreshing = false

  def bind(windowController: WindowController): Unit = {
    _windowController = windowController
  }

  def onRefreshClicked(handler: () => Future[Unit]): Unit = {
    _refreshHandler = Option(handler)
  }

  private var _navigationBarVisibilityListener: Option[(Boolean) => Unit] = None
  private var _navigationIsVisible = false

  private var _refreshHandler: Option[() => Future[Unit]] = None

  private var _windowController: WindowController = null

  var configureSnackBar: (Int, String, String) => SnackBarInstance = (_, _, _) => null
  var dismissSnackbar: () => Unit = () => ()


  val eventEmitter = new JsEventEmitter

  case class StartRefresh()
  case class StopRefresh()
}

object WindowService {
  def init(module: RichModule) = module.serviceOf[WindowService]("windowService")
} 
Example 54
Source File: ApiDependantController.scala    From ledger-manager-chrome   with MIT License 5 votes vote down vote up
package co.ledger.manager.web.controllers.manager

import java.util.Date

import biz.enef.angulate.Scope
import co.ledger.manager.web.services.ApiService

import scala.concurrent.Future
import scala.scalajs.js
import scala.scalajs.js.timers
import scala.scalajs.js.timers._
import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global


trait ApiDependantController {

  val apiService: ApiService
  val $scope: Scope

  private var _loading = false
  def isLoading() = _loading

  def onBeforeRefresh(): Unit = {}

  def onAfterRefresh(): Unit = {}

  def refresh(): Unit = {
    onBeforeRefresh()

    _loading = !apiService.applications.isCompleted

    val startDate = new Date().getTime

    val applyUi = {() =>
      if (_loading) {
        import timers._
        val wait = Math.max(1500, new Date().getTime - startDate)
        setTimeout(wait) {
          _loading = false
          $scope.$apply()
        }
      } else {
        _loading = false
        $scope.$apply()
      }
    }
    apiService.firmwares flatMap {(_) =>
      apiService.applications
    } onComplete {
      case Success(apps) =>
        onAfterRefresh()
        applyUi()
      case Failure(ex) =>
        ex.printStackTrace()
        applyUi()
    }
  }

  def fullRefresh(): Unit = {
    if (!_loading) {
      apiService.refresh()
      refresh()
    }
  }

} 
Example 55
Source File: LeftPanel.scala    From ledger-manager-chrome   with MIT License 5 votes vote down vote up
package co.ledger.manager.web.components

import java.util.Date

import biz.enef.angulate.{Directive, Scope}
import biz.enef.angulate.Module.RichModule
import biz.enef.angulate.core.{Attributes, JQLite, Location}
import co.ledger.manager.web.components.LeftPanel.LeftPanelScope
import co.ledger.manager.web.services.{ApiService, SessionService}
import co.ledger.wallet.core.device.utils.EventReceiver
import org.widok.moment.Moment

import scala.concurrent.duration
import scala.scalajs.js
import scala.scalajs.js.{Dictionary, UndefOr, timers}
import scala.scalajs.js.annotation.ScalaJSDefined


class LeftPanel( $location: Location,
                 $route: js.Dynamic,
                 $parse: js.Dynamic,
                 apiService: ApiService) extends Directive {
  override def templateUrl: String = "./templates/components/left-panel.html"
  override type ControllerType = js.Dynamic
  override type ScopeType = LeftPanel.LeftPanelScope

  val categories = js.Array(
    js.Dynamic.literal(id = "apps", icon = "th-large", titleKey = "common.applications", path = "/old/apps/index/"),
    js.Dynamic.literal(id = "firmwares", icon = "cog", titleKey = "common.firmwares", path = "/old/firmwares/index/")
  )


  override def isolateScope: Dictionary[String] = js.Dictionary[String](
    "onRefresh" -> "&"
  )

  override def postLink(scope: ScopeType, elem: JQLite, attrs: Attributes): Unit = {
    import timers._
    import duration._

    var interval: SetIntervalHandle = null
    val receiver = new EventReceiver {
      override def receive: Receive = {
        case ApiService.UpdateDoneEvent() =>
          scope.lastUpdate = apiService.lastUpdateDate.map {(date) =>
            Moment(date.getTime).fromNow().capitalize
          } getOrElse("Never")
          scope.asInstanceOf[Scope].$digest()
        case ignore =>
      }
    }
    scope.categories = categories
    scope.selected = attrs("selectedCategory")
    scope.navigate = {(path: String) =>
      $location.path(path)
      $route.reload()
    }
    scope.refresh = {() =>
      scope.asInstanceOf[js.Dynamic].onRefresh()
    }
    scope.lastUpdate = apiService.lastUpdateDate.map {(date) =>
      Moment(date.getTime).fromNow().capitalize
    } getOrElse("Never")
    scope.deviceName = SessionService.instance.currentSession.get.device._2.name
    scope.asInstanceOf[Scope].$on("$destroy", {() =>
      apiService.eventEmitter.unregister(receiver)
      if (interval != null)
        clearInterval(interval)
    })
    apiService.eventEmitter.register(receiver)
    interval = setInterval(1.minute) {
      scope.lastUpdate = apiService.lastUpdateDate.map {(date) =>
        Moment(date.getTime).fromNow().capitalize
      } getOrElse("Never")
      scope.asInstanceOf[Scope].$digest()
    }
  }

}

object LeftPanel {

  @ScalaJSDefined
  class LeftPanelScope extends js.Object {
    var selected: UndefOr[String] = _
    var categories: js.Array[js.Object with js.Dynamic] = _
    var navigate: js.Function = _
    var refresh: js.Function = _
    var lastUpdate: String = _
    var deviceName: String = _
  }

  def init(module: RichModule) = module.directiveOf[LeftPanel]("leftPanel")
} 
Example 56
Source File: EventProducerSpec.scala    From reactive-kafka-microservice-template   with Apache License 2.0 5 votes vote down vote up
package akka.kafka

import java.util.Date

import akka.Done
import akka.actor.ActorSystem
import akka.serialization.Serialization
import akka.stream.QueueOfferResult
import akka.stream.QueueOfferResult.Enqueued
import akka.stream.scaladsl.SourceQueueWithComplete
import akka.testkit.{DefaultTimeout, EventFilter, ImplicitSender, TestActorRef, TestKit, TestProbe}
import com.omearac.producers.EventProducer
import com.omearac.shared.AkkaStreams
import com.omearac.shared.EventMessages.{ActivatedProducerStream, MessagesPublished}
import com.omearac.shared.KafkaMessages.ExampleAppEvent
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}

import scala.concurrent.Future


class EventProducerSpec extends TestKit(ActorSystem("EventProducerSpec",ConfigFactory.parseString("""
    akka.loggers = ["akka.testkit.TestEventListener"] """)))
    with DefaultTimeout with ImplicitSender
    with WordSpecLike with Matchers with BeforeAndAfterAll
    with AkkaStreams {

    val testProducer = TestActorRef(new EventProducer)
    val producerActor = testProducer.underlyingActor
    val mockProducerStream: SourceQueueWithComplete[Any] = new SourceQueueWithComplete[Any] {
        override def complete(): Unit = println("complete")

        override def fail(ex: Throwable): Unit = println("fail")

        override def offer(elem: Any): Future[QueueOfferResult] = Future{Enqueued}

        override def watchCompletion(): Future[Done] = Future{Done}
    }

    override def afterAll: Unit = {
        shutdown()
    }

    //Create a test event listener for the local message bus
    val testEventListener = TestProbe()
    system.eventStream.subscribe(testEventListener.ref, classOf[ExampleAppEvent])


    "Sending ActivatedProducerStream to EventProducer in receive state" should {
        "save the stream ref and change state to producing " in {
            testProducer ! ActivatedProducerStream(mockProducerStream, "TestTopic")
            Thread.sleep(500)
            producerActor.producerStream should be(mockProducerStream)
            EventFilter.error(message = "EventProducer got the unknown message while producing: testMessage", occurrences = 1) intercept {
                testProducer ! "testMessage"
            }
        }
    }

    "Sending ExampleAppEvent to system bus while EventProducer is in publishEvent state" should {
        "offer the ExampleAppEvent to the stream " in {
            val producingState = producerActor.publishEvent
            producerActor.context.become(producingState)
            producerActor.producerStream = mockProducerStream
            val dateFormat = new java.text.SimpleDateFormat("dd:MM:yy:HH:mm:ss.SSS")
            lazy val timetag = dateFormat.format(new Date(System.currentTimeMillis()))
            val eventMsg = MessagesPublished(5)
            val testMessage = ExampleAppEvent(timetag,Serialization.serializedActorPath(self),eventMsg.toString)
            system.eventStream.publish(testMessage)
            testEventListener.expectMsgPF(){
                case ExampleAppEvent(_,_,m) => if (m == eventMsg.toString) () else fail()
            }
        }
    }
 } 
Example 57
Source File: CodebaseAnalyzeAggregatorActor.scala    From CodeAnalyzerTutorial   with Apache License 2.0 5 votes vote down vote up
package tutor

import java.util.Date

import akka.actor.{Actor, ActorLogging, ActorRef, Cancellable, Props, Terminated}
import akka.routing.{ActorRefRoutee, RoundRobinRoutingLogic, Router}
import tutor.CodebaseAnalyzeAggregatorActor.{AnalyzeDirectory, Complete, Report, Timeout}
import tutor.SourceCodeAnalyzerActor.NewFile
import tutor.utils.BenchmarkUtil

import scala.concurrent.duration._
import scala.util.{Failure, Success, Try}

object CodebaseAnalyzeAggregatorActor {
  def props(): Props = Props(new CodebaseAnalyzeAggregatorActor)

  final case class AnalyzeDirectory(path: String)

  final case class Complete(result: Try[SourceCodeInfo])

  final case object Timeout

  final case class Report(codebaseInfo: CodebaseInfo)

}

class CodebaseAnalyzeAggregatorActor extends Actor with ActorLogging with DirectoryScanner with ReportFormatter {
  var controller: ActorRef = _
  var currentPath: String = _
  var beginTime: Date = _
  var fileCount = 0
  var completeCount = 0
  var failCount = 0
  var result: CodebaseInfo = CodebaseInfo.empty
  var timeoutTimer: Cancellable = _

  var router: Router = {
    val routees = Vector.fill(8) {
      val r = context.actorOf(SourceCodeAnalyzerActor.props())
      context watch r
      ActorRefRoutee(r)
    }
    Router(RoundRobinRoutingLogic(), routees)
  }

  override def receive: Receive = {
    case AnalyzeDirectory(path) => {
      controller = sender()
      currentPath = path
      beginTime = BenchmarkUtil.recordStart(s"analyze folder $currentPath")
      foreachFile(path, PresetFilters.knownFileTypes, PresetFilters.ignoreFolders) { file =>
        fileCount += 1
        router.route(NewFile(file.getAbsolutePath), context.self)
      }
      import context.dispatcher
      timeoutTimer = context.system.scheduler.scheduleOnce((fileCount / 1000).seconds, context.self, Timeout)
    }
    case Complete(Success(sourceCodeInfo: SourceCodeInfo)) => {
      completeCount += 1
      result = result + sourceCodeInfo
      finishIfAllComplete()
    }
    case Complete(Failure(exception)) => {
      completeCount += 1
      failCount += 1
      log.warning("processing file failed {}", exception)
      finishIfAllComplete()
    }
    case Timeout => {
      println(s"${result.totalFileNums} of $fileCount files processed before timeout")
      controller ! Report(result)
      BenchmarkUtil.recordElapse(s"analyze folder $currentPath", beginTime)
    }
    case Terminated(a) =>
      router = router.removeRoutee(a)
      val r = context.actorOf(Props[SourceCodeAnalyzerActor])
      context watch r
      router = router.addRoutee(r)
    case x@_ => log.error(s"receive unknown message $x")
  }

  def finishIfAllComplete(): Unit = {
    if (completeCount == fileCount) {
      timeoutTimer.cancel()
      controller ! Report(result)
      BenchmarkUtil.recordElapse(s"analyze folder $currentPath", beginTime)
      context.stop(self)
    }
  }
} 
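As a usage sketch (not part of the original project), the aggregator above could be driven like this, assuming the rest of the tutorial classes (SourceCodeAnalyzerActor, CodebaseInfo, the scanner traits) are on the classpath; the directory path is a placeholder.

import akka.actor.ActorSystem
import tutor.CodebaseAnalyzeAggregatorActor
import tutor.CodebaseAnalyzeAggregatorActor.AnalyzeDirectory

object AnalyzeDirectoryExample extends App {
  val system = ActorSystem("code-analyzer")
  val aggregator = system.actorOf(CodebaseAnalyzeAggregatorActor.props(), "aggregator")
  // The Report is sent back to the sender of AnalyzeDirectory; in a real application
  // that sender would be another actor (or an ask) that renders the result.
  aggregator ! AnalyzeDirectory("/path/to/some/scala/project")
}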
Example 58
Source File: BenchmarkUtil.scala    From CodeAnalyzerTutorial   with Apache License 2.0 5 votes vote down vote up
package tutor.utils

import java.text.SimpleDateFormat
import java.util.Date

import com.typesafe.scalalogging.StrictLogging

object BenchmarkUtil extends StrictLogging {
  def record[T](actionDesc: String)(action: => T): T = {
    val beginTime = new Date
    logger.info(s"begin $actionDesc")
    val rs = action
    logger.info(s"end $actionDesc")
    val endTime = new Date
    val elapsed = new Date(endTime.getTime - beginTime.getTime)
    val sdf = new SimpleDateFormat("mm:ss.SSS")
    logger.info(s"$actionDesc total elapsed ${sdf.format(elapsed)}")
    rs
  }
  def recordStart(actionDesc: String):Date = {
    logger.info(s"$actionDesc begin")
    new Date
  }

  def recordElapse(actionDesc: String, beginFrom: Date):Unit = {
    logger.info(s"$actionDesc ended")
    val endTime = new Date
    val elapsed = new Date(endTime.getTime - beginFrom.getTime)
    val sdf = new SimpleDateFormat("mm:ss.SSS")
    logger.info(s"$actionDesc total elapsed ${sdf.format(elapsed)}")
  }
} 
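For illustration, both measurement styles above could be exercised as in the following sketch (assuming tutor.utils and its scala-logging dependency are on the classpath; the action descriptions are arbitrary).

import tutor.utils.BenchmarkUtil

object BenchmarkUtilExample extends App {
  // One-shot measurement: wraps the block and logs begin/end plus the elapsed time.
  val total = BenchmarkUtil.record("sum 1..1000000") {
    (1L to 1000000L).sum
  }

  // Split measurement: record the start, do the work, then log the elapsed time.
  val begin = BenchmarkUtil.recordStart("short sleep")
  Thread.sleep(250)
  BenchmarkUtil.recordElapse("short sleep", begin)

  println(s"total = $total")
}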
Example 59
Source File: CloudFrontSigner.scala    From openwhisk   with Apache License 2.0 5 votes vote down vote up
package org.apache.openwhisk.core.database.s3
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets.UTF_8
import java.security.PrivateKey
import java.time.Instant
import java.util.Date

import akka.http.scaladsl.model.Uri
import com.amazonaws.auth.PEM
import com.amazonaws.services.cloudfront.CloudFrontUrlSigner
import com.amazonaws.services.cloudfront.util.SignerUtils
import com.amazonaws.services.cloudfront.util.SignerUtils.Protocol

import scala.concurrent.duration._

case class CloudFrontConfig(domainName: String,
                            keyPairId: String,
                            privateKey: String,
                            timeout: FiniteDuration = 10.minutes)

case class CloudFrontSigner(config: CloudFrontConfig) extends UrlSigner {
  private val privateKey = createPrivateKey(config.privateKey)

  override def getSignedURL(s3ObjectKey: String): Uri = {
    val resourcePath = SignerUtils.generateResourcePath(Protocol.https, config.domainName, s3ObjectKey)
    val date = Date.from(Instant.now().plusSeconds(config.timeout.toSeconds))
    val url = CloudFrontUrlSigner.getSignedURLWithCannedPolicy(resourcePath, config.keyPairId, privateKey, date)
    Uri(url)
  }

  override def toString: String = s"CloudFront Signer - ${config.domainName}"

  private def createPrivateKey(keyContent: String): PrivateKey = {
    val is = new ByteArrayInputStream(keyContent.getBytes(UTF_8))
    PEM.readPrivateKey(is)
  }
} 
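A hypothetical caller might configure and use the signer as follows; the domain, key pair id and PEM body are placeholders, and the timeout simply overrides the 10-minute default.

import scala.concurrent.duration._
import org.apache.openwhisk.core.database.s3.{CloudFrontConfig, CloudFrontSigner}

object CloudFrontSignerExample extends App {
  // Placeholder key material: a real deployment would load the PEM-encoded private key from secrets.
  val pem =
    """-----BEGIN PRIVATE KEY-----
      |...
      |-----END PRIVATE KEY-----""".stripMargin

  val config = CloudFrontConfig("d1234example.cloudfront.net", "APKAEXAMPLEKEYPAIRID", pem, 15.minutes)
  val signer = CloudFrontSigner(config)
  println(signer.getSignedURL("attachments/code.zip"))
}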
Example 60
Source File: Vehicle.scala    From BusFloatingData   with Apache License 2.0 5 votes vote down vote up
package de.nierbeck.floating.data.domain

import java.util.Date

case class Vehicle(
  id: String,
  time: Option[Date] = None,
  latitude: Double,
  longitude: Double,
  heading: Integer,
  route_id: Option[String] = None,
  run_id: String = "none",
  seconds_since_report: Integer = 0
)

case class TiledVehicle(
  tileId: String,
  timeID: Date,
  id: String,
  time: Option[Date] = None,
  latitude: Double,
  longitude: Double,
  heading: Integer,
  route_id: Option[String] = None,
  run_id: String = "none",
  seconds_since_report: Integer = 0
) 
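Constructing the two case classes above is straightforward; the ids and coordinates in this sketch are invented for illustration.

import java.util.Date
import de.nierbeck.floating.data.domain.{TiledVehicle, Vehicle}

object VehicleExample extends App {
  val now = new Date()
  val vehicle = Vehicle(id = "bus-4711", time = Some(now), latitude = 34.05, longitude = -118.24, heading = 90)
  // The tiled variant adds a tile id and a time bucket, keeping the rest of the fields unchanged.
  val tiled = TiledVehicle(tileId = "tile_34.05_-118.24", timeID = now, id = vehicle.id, time = vehicle.time,
    latitude = vehicle.latitude, longitude = vehicle.longitude, heading = vehicle.heading)
  println(tiled)
}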
Example 61
Source File: Note.scala    From spring-cloud-demo   with Apache License 2.0 5 votes vote down vote up
package com.jesperdj.example.service.whiteboard.domain

import java.lang.{Long => JavaLong}
import java.util.Date
import javax.persistence.{Column, Entity, GeneratedValue, Id}

import scala.beans.BeanProperty

@Entity
class Note {

  @Id
  @GeneratedValue
  @BeanProperty
  var id: JavaLong = _

  @Column(nullable = false)
  @BeanProperty
  var createdDateTime: Date = _

  @Column(nullable = false, length = 40)
  @BeanProperty
  var authorName: String = _

  @Column(nullable = false, length = 1000)
  @BeanProperty
  var content: String = _
} 
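Because the fields are annotated with @BeanProperty, the entity can be populated from Scala by assigning the vars directly (JPA uses the generated getters/setters); the values in this sketch are placeholders.

import java.util.Date
import com.jesperdj.example.service.whiteboard.domain.Note

object NoteExample extends App {
  val note = new Note
  note.createdDateTime = new Date()
  note.authorName = "Alice"
  note.content = "Remember to rotate the API keys."
  // The id stays null until the entity is persisted and the @GeneratedValue is assigned.
  println(s"${note.authorName} @ ${note.createdDateTime}: ${note.content}")
}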
Example 62
Source File: WhiteboardClient.scala    From spring-cloud-demo   with Apache License 2.0 5 votes vote down vote up
package com.jesperdj.example.client.whiteboard

import java.util.Date

import com.jesperdj.example.client.whiteboard.domain.Note
import org.springframework.cloud.netflix.feign.FeignClient
import org.springframework.hateoas.Resources
import org.springframework.stereotype.Component
import org.springframework.web.bind.annotation.{RequestMapping, RequestMethod}

import scala.collection.JavaConversions._

@FeignClient(name = "whiteboard-service", fallback = classOf[WhiteboardClientFallback])
trait WhiteboardClient {

  @RequestMapping(method = Array(RequestMethod.GET), path = Array("/notes"))
  def getAllNotes: Resources[Note]

  @RequestMapping(method = Array(RequestMethod.POST), path = Array("/notes"))
  def addNote(note: Note): Note
}

@Component
class WhiteboardClientFallback extends WhiteboardClient {

  override def getAllNotes: Resources[Note] = {
    val note = new Note
    note.createdDateTime = new Date()
    note.authorName = "System"
    note.content = "The whiteboard is currently not available."
    new Resources[Note](Seq(note), Seq())
  }

  override def addNote(note: Note): Note = note // Do nothing
} 
Example 63
Source File: TimeCodec.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.codec
import java.time.Instant
import java.util.Date

import wvlet.airframe.msgpack.io.ByteArrayBuffer
import wvlet.airframe.msgpack.spi._
import wvlet.log.LogSupport

import scala.util.{Failure, Success, Try}


object JavaInstantTimeCodec extends MessageCodec[Instant] {
  override def pack(p: Packer, v: Instant): Unit = {
    // TODO airframe-msgpack in Codec interface
    // Use msgpack Timestamp type
    val buf    = ByteArrayBuffer.newBuffer(15)
    val cursor = WriteCursor(buf, 0)
    OffsetPacker.packTimestamp(cursor, v)
    val extData = buf.readBytes(0, cursor.lastWrittenBytes)
    p.writePayload(extData, 0, cursor.lastWrittenBytes)
  }

  override def unpack(u: Unpacker, v: MessageContext): Unit = {
    Try {
      u.getNextFormat.getValueType match {
        case ValueType.STRING =>
          // Use ISO instant formatter
          val isoInstantFormat = u.unpackString
          wvlet.airframe.codec.Compat
            .parseInstant(isoInstantFormat)
            .getOrElse(Instant.ofEpochMilli(isoInstantFormat.toLong))
        case ValueType.INTEGER =>
          val epochMillis = u.unpackLong
          Instant.ofEpochMilli(epochMillis)
        case ValueType.EXTENSION =>
          u.unpackTimestamp
        case other =>
          v.setIncompatibleFormatException(this, s"Cannot create Instant from ${other} type")
      }
    } match {
      case Success(x) => v.setObject(x)
      case Failure(e) => v.setError(e)
    }
  }
}

object JavaUtilDateCodec extends MessageCodec[Date] with LogSupport {
  override def pack(p: Packer, v: Date): Unit = {
    // Use Instant for encoding
    JavaInstantTimeCodec.pack(p, v.toInstant)
  }
  override def unpack(u: Unpacker, v: MessageContext): Unit = {
    JavaInstantTimeCodec.unpack(u, v)
    if (!v.isNull) {
      v.setObject(Date.from(v.getLastValue.asInstanceOf[Instant]))
    }
  }
} 
Example 64
Source File: JavaTimeCodec.scala    From airframe   with Apache License 2.0 5 votes vote down vote up
package wvlet.airframe.codec
import java.time.{Instant, ZonedDateTime}
import java.util.Date

import wvlet.airframe.msgpack.io.ByteArrayBuffer
import wvlet.airframe.msgpack.spi._
import wvlet.airframe.surface.Surface
import wvlet.log.LogSupport

import scala.util.{Failure, Success, Try}


object JavaTimeCodec {
  val javaTimeCodecs = Map(
    Surface.of[ZonedDateTime] -> ZonedDateTimeCodec,
    Surface.of[Date]          -> JavaUtilDateCodec
  )

  object ZonedDateTimeCodec extends MessageCodec[ZonedDateTime] {
    override def pack(p: Packer, v: ZonedDateTime): Unit = {
      // Use java standard ZonedDateTime string repr such as "2007-12-03T10:15:30+01:00[Europe/Paris]"
      p.packString(v.toString)
    }

    override def unpack(u: Unpacker, v: MessageContext): Unit = {
      val zonedDateTimeStr = u.unpackString
      Try(ZonedDateTime.parse(zonedDateTimeStr)) match {
        case Success(zd) =>
          v.setObject(zd)
        case Failure(e) =>
          v.setIncompatibleFormatException(
            this,
            s"${zonedDateTimeStr} cannot be read as ZonedDateTime: ${e.getMessage}"
          )
      }
    }
  }

} 
Example 65
Source File: L3-DStreamMapping.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{ Milliseconds, Seconds, StreamingContext }
import org.apache.hadoop.io.{ Text, LongWritable, IntWritable }
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.streaming.dstream.DStream
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.hadoop.mapreduce.lib.output.{ TextOutputFormat => NewTextOutputFormat }
import org.apache.spark.streaming.dstream.PairDStreamFunctions
import org.apache.log4j.LogManager
import org.json4s._
import org.json4s.native.JsonMethods._
import java.text.SimpleDateFormat
import java.util.Date

object RedditMappingApp {
  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: RedditMappingApp <appname> <input_path>")
      System.exit(1)
    }
    val Seq(appName, inputPath) = args.toSeq
    val LOG = LogManager.getLogger(this.getClass)

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(1))
    LOG.info("Started at %d".format(ssc.sparkContext.startTime))

    val comments = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPath, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    val sdf = new SimpleDateFormat("yyyy-MM-dd")
    val tsKey = "created_utc"
    val secs = 1000L
    val keyedByDay = comments.map(rec => {
      val ts = (parse(rec) \ tsKey).values
      (sdf.format(new Date(ts.toString.toLong * secs)), rec)
    })

    val keyedByDayPart = comments.mapPartitions(iter => {
      var ret = List[(String, String)]()
      while (iter.hasNext) {
        val rec = iter.next
        val ts = (parse(rec) \ tsKey).values
        ret.::=(sdf.format(new Date(ts.toString.toLong * secs)), rec)
      }
      ret.iterator
    })

    val wordTokens = comments.map(rec => {
      ((parse(rec) \ "body")).values.toString.split(" ")
    })

    val wordTokensFlat = comments.flatMap(rec => {
      ((parse(rec) \ "body")).values.toString.split(" ")
    })

    val filterSubreddit = comments.filter(rec =>
      (parse(rec) \ "subreddit").values.toString.equals("AskReddit"))

    val sortedByAuthor = comments.transform(rdd =>
      (rdd.sortBy(rec => (parse(rec) \ "author").values.toString)))

    ssc.start()
    ssc.awaitTermination()

  }
} 
Example 66
Source File: L3-DStreamKeyValue.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{ Milliseconds, Seconds, StreamingContext }
import org.apache.hadoop.io.{ Text, LongWritable, IntWritable }
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.streaming.dstream.DStream
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.hadoop.mapreduce.lib.output.{ TextOutputFormat => NewTextOutputFormat }
import org.apache.spark.streaming.dstream.PairDStreamFunctions
import org.apache.log4j.LogManager
import org.json4s._
import org.json4s.native.JsonMethods._
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.spark.HashPartitioner

object RedditKeyValueApp {
  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: RedditKeyValueApp <appname> <input_path> <input_path_popular>")
      System.exit(1)
    }
    val Seq(appName, inputPath, inputPathPopular) = args.toSeq
    val LOG = LogManager.getLogger(this.getClass)

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(1))
    LOG.info("Started at %d".format(ssc.sparkContext.startTime))

    val comments = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPath, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    val popular = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPathPopular, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    val topAuthors = comments.map(rec => ((parse(rec) \ "author").values.toString, 1))
      .groupByKey()
      .map(r => (r._2.sum, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    val topAuthors2 = comments.map(rec => ((parse(rec) \ "author").values.toString, 1))
      .reduceByKey(_ + _)
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    val topAuthorsByAvgContent = comments.map(rec => ((parse(rec) \ "author").values.toString, (parse(rec) \ "body").values.toString.split(" ").length))
      .combineByKey(
        (v) => (v, 1),
        (accValue: (Int, Int), v) => (accValue._1 + v, accValue._2 + 1),
        (accCombine1: (Int, Int), accCombine2: (Int, Int)) => (accCombine1._1 + accCombine2._1, accCombine1._2 + accCombine2._2),
        new HashPartitioner(ssc.sparkContext.defaultParallelism))
      .map({ case (k, v) => (k, v._1 / v._2.toFloat) })
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    val keyedBySubreddit = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, rec))
    val keyedBySubreddit2 = popular.map(rec => ({
      val t = rec.split(",")
      (t(1).split("/")(4), t(0))
    }))
    val commentsWithIndustry = keyedBySubreddit.join(keyedBySubreddit2)

    val keyedBySubredditCo = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, rec))
    val keyedBySubredditCo2 = popular.map(rec => ({
      val t = rec.split(",")
      (t(1).split("/")(4), t(0))
    }))
    val commentsWithIndustryCo = keyedBySubredditCo.cogroup(keyedBySubredditCo2)

    val checkpointPath = "/tmp"
    ssc.checkpoint(checkpointPath)
    val updateFunc = (values: Seq[Int], state: Option[Int]) => {
      val currentCount = values.sum
      val previousCount = state.getOrElse(0)
      Some(currentCount + previousCount)
    }
    val keyedBySubredditState = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, 1))
    val globalCount = keyedBySubredditState.updateStateByKey(updateFunc)
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    ssc.start()
    ssc.awaitTermination()

  }
} 
Example 67
Source File: L3-DStreamVariation.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{ Milliseconds, Seconds, StreamingContext }
import org.apache.hadoop.io.{ Text, LongWritable, IntWritable }
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.streaming.dstream.DStream
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.hadoop.mapreduce.lib.output.{ TextOutputFormat => NewTextOutputFormat }
import org.apache.spark.streaming.dstream.PairDStreamFunctions
import org.apache.log4j.LogManager
import org.json4s._
import org.json4s.native.JsonMethods._
import java.text.SimpleDateFormat
import java.util.Date

object RedditVariationApp {
  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: RedditVariationApp <appname> <input_path>")
      System.exit(1)
    }
    val Seq(appName, inputPath) = args.toSeq
    val LOG = LogManager.getLogger(this.getClass)

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(1))
    LOG.info("Started at %d".format(ssc.sparkContext.startTime))

    val comments = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPath, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    val merged = comments.union(comments)

    val repartitionedComments = comments.repartition(4)

    val rddMin = comments.glom().map(arr =>
      arr.minBy(rec => ((parse(rec) \ "created_utc").values.toString.toInt)))

    ssc.start()
    ssc.awaitTermination()

  }
} 
Example 68
Source File: L3-DStreamWindowAndAction.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{ Milliseconds, Seconds, StreamingContext }
import org.apache.hadoop.io.{ Text, LongWritable, IntWritable }
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.streaming.dstream.DStream
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.hadoop.mapreduce.lib.output.{ TextOutputFormat => NewTextOutputFormat }
import org.apache.spark.streaming.dstream.PairDStreamFunctions
import org.apache.log4j.LogManager
import org.json4s._
import org.json4s.native.JsonMethods._
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.spark.HashPartitioner

object RedditWindowAndActionApp {
  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: RedditWindowAndActionApp <appname> <input_path>")
      System.exit(1)
    }
    val Seq(appName, inputPath) = args.toSeq
    val LOG = LogManager.getLogger(this.getClass)

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(1))
    LOG.info("Started at %d".format(ssc.sparkContext.startTime))

    val comments = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPath, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    val checkpointPath = "/tmp"
    ssc.checkpoint(checkpointPath)
    val updateFunc = (values: Seq[Int], state: Option[Int]) => {
      val currentCount = values.sum
      val previousCount = state.getOrElse(0)
      Some(currentCount + previousCount)
    }
    val keyedBySubredditState = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, 1))
    val globalCount = keyedBySubredditState.updateStateByKey(updateFunc)
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    val distinctSubreddits = comments.map(rec => ((parse(rec)) \ "subreddit").values.toString)
    val windowedRecs = distinctSubreddits.window(Seconds(5), Seconds(5))
    val windowedCounts = windowedRecs.countByValue()

    windowedCounts.print(10)
    windowedCounts.saveAsObjectFiles("subreddit", "obj")
    windowedCounts.saveAsTextFiles("subreddit", "txt")

    globalCount.saveAsHadoopFiles("subreddit", "hadoop",
      classOf[IntWritable], classOf[Text], classOf[TextOutputFormat[IntWritable, Text]])
    globalCount.saveAsNewAPIHadoopFiles("subreddit", "newhadoop",
      classOf[IntWritable], classOf[Text], classOf[NewTextOutputFormat[IntWritable, Text]])
    comments.foreachRDD(rdd => {
      LOG.info("RDD: %s, Count: %d".format(rdd.id, rdd.count()))
    })

    ssc.start()
    ssc.awaitTermination()

  }
} 
Example 69
Source File: L3-DStreamAggregation.scala    From prosparkstreaming   with Apache License 2.0 5 votes vote down vote up
package org.apress.prospark

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{ Milliseconds, Seconds, StreamingContext }
import org.apache.hadoop.io.{ Text, LongWritable, IntWritable }
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.streaming.dstream.DStream
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.hadoop.mapreduce.lib.output.{ TextOutputFormat => NewTextOutputFormat }
import org.apache.spark.streaming.dstream.PairDStreamFunctions
import org.apache.log4j.LogManager
import org.json4s._
import org.json4s.native.JsonMethods._
import java.text.SimpleDateFormat
import java.util.Date

object RedditAggregationApp {
  def main(args: Array[String]) {
    if (args.length != 2) {
      System.err.println(
        "Usage: RedditAggregationApp <appname> <input_path>")
      System.exit(1)
    }
    val Seq(appName, inputPath) = args.toSeq
    val LOG = LogManager.getLogger(this.getClass)

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(1))
    LOG.info("Started at %d".format(ssc.sparkContext.startTime))

    val comments = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPath, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    val recCount = comments.count()

    val recCountValue = comments.countByValue()

    val totalWords = comments.map(rec => ((parse(rec) \ "body").values.toString))
      .flatMap(body => body.split(" "))
      .map(word => 1)
      .reduce(_ + _)

    ssc.start()
    ssc.awaitTermination()

  }
} 
Example 70
Source File: MockDataHandler.scala    From play2-oauth2-provider   with MIT License 5 votes vote down vote up
package scalaoauth2.provider

import java.util.Date

import scala.concurrent.Future

class MockDataHandler extends DataHandler[User] {

  override def validateClient(
      maybeClientCredential: Option[ClientCredential],
      request: AuthorizationRequest
  ): Future[Boolean] = Future.successful(false)

  override def findUser(
      maybeClientCredential: Option[ClientCredential],
      request: AuthorizationRequest
  ): Future[Option[User]] = Future.successful(None)

  override def createAccessToken(
      authInfo: AuthInfo[User]
  ): Future[AccessToken] =
    Future.successful(AccessToken("", Some(""), Some(""), Some(0L), new Date()))

  override def findAuthInfoByCode(
      code: String
  ): Future[Option[AuthInfo[User]]] = Future.successful(None)

  override def findAuthInfoByRefreshToken(
      refreshToken: String
  ): Future[Option[AuthInfo[User]]] = Future.successful(None)

  override def findAccessToken(token: String): Future[Option[AccessToken]] =
    Future.successful(None)

  override def findAuthInfoByAccessToken(
      accessToken: AccessToken
  ): Future[Option[AuthInfo[User]]] = Future.successful(None)

  override def getStoredAccessToken(
      authInfo: AuthInfo[User]
  ): Future[Option[AccessToken]] = Future.successful(None)

  override def refreshAccessToken(
      authInfo: AuthInfo[User],
      refreshToken: String
  ): Future[AccessToken] =
    Future.successful(AccessToken("", Some(""), Some(""), Some(0L), new Date()))

  override def deleteAuthCode(code: String): Future[Unit] =
    Future.successful(())
}

trait User {
  def id: Long
  def name: String
}

case class MockUser(id: Long, name: String) extends User 
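A small sketch of how the mock might be exercised in a test; it only calls methods defined above, so every lookup resolves to None (the object name is invented).

import scala.concurrent.Await
import scala.concurrent.duration._
import scalaoauth2.provider.MockDataHandler

object MockDataHandlerExample extends App {
  val handler = new MockDataHandler
  // The mock answers every lookup with None, which is convenient for driving error paths.
  val token = Await.result(handler.findAccessToken("any-token"), 1.second)
  println(s"findAccessToken -> $token") // prints: findAccessToken -> None
}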
Example 71
Source File: package.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky

import java.io.File
import java.text.SimpleDateFormat
import java.util.Date

import scalatags.Text
import scalatags.Text.all.{a, hr, href, p, _}

package object web {
  def footer(): Text.TypedTag[String] = {
    p(
      hr(),
      p(
        ReportCss.footer,
        "Created with ",
        a(href := "https://github.com/otrebski/sbt-flaky", "sbt-flaky plugin"), br,
        s"Report generated at ${new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date())}",
        s"Fugue icons are on Creative Common license"
      )
    )
  }

  def indexHtml(reportFile: File, historyFile: Option[File]): String = {
    val history = historyFile match {
      case Some(fileName) => a(href := fileName.getName, "History trends")
      case None =>
        p(
          "History trends report is not created. To enable history check documentation at ",
          a(href := "https://github.com/otrebski/sbt-flaky", "https://github.com/otrebski/sbt-flaky")
        )
    }

    html(
      head(link(rel := "stylesheet", href := "report.css")),
      body(
        h1(ReportCss.title, "Flaky test report"),
        h4(ReportCss.subtitle, a(href := reportFile.getName, "Report for last build")),
        h4(ReportCss.subtitle, history),
        footer()
      )
    ).render
  }

  def anchorTest(test: Test): String = s"${test.clazz}_${test.test}"

  def anchorClass(test: Test): String = test.clazz

  def anchorTestRun(testCase: TestCase): String = testCase.runName

  def singleTestDir(test: Test): String = test.clazz

  def singleTestFileName(test: Test): String = s"${test.test.replaceAll("/", "_")}.html"

  def linkToSingleTest(test: Test): String = singleTestDir(test) + "/" + singleTestFileName(test)

  def linkToSingleTestClass(clazz: String): String = s"flaky-report.html#$clazz"

  def linkToRunNameInSingleTest(test: Test,runName:String) = s"${linkToSingleTest(test)}#$runName"
} 
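As a usage sketch, the helpers above can render the landing page directly; the file names are placeholders for the reports the plugin writes.

import java.io.File
import flaky.web._

object IndexPageExample extends App {
  // With a history file the index links to the trends page, without one it shows a hint instead.
  val page = indexHtml(new File("flaky-report.html"), Some(new File("history-trends.html")))
  println(page.take(200) + "...")
}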
Example 72
Source File: History.scala    From sbt-flaky   with Apache License 2.0 5 votes vote down vote up
package flaky.history

import java.io.{File, FileFilter, InputStream}
import java.text.SimpleDateFormat
import java.util.Date

import flaky.{Flaky, FlakyTestReport, Io}
import org.apache.commons.vfs2.VFS

import scala.xml.XML

class History(project: String, historyDir: File, flakyReportDir: File, projectDir: File) {

  private val zipFileFilter = new FileFilter {
    override def accept(pathname: File): Boolean = pathname.getName.endsWith(".zip")
  }

  private def runFiles(historyDir: File): List[File] = historyDir.listFiles(zipFileFilter).toList

  def addCurrentToHistory(): Unit = {
    val timestamp = System.currentTimeMillis()

    val date = new SimpleDateFormat(History.dateFormat).format(new Date(timestamp))
    val gitCommit = Git(projectDir).currentId().toOption
    val historyReportDescription = HistoryReportDescription(timestamp, gitCommit)
    HistoryReportDescription.save(historyReportDescription, new File(flakyReportDir, History.descriptorFile))
    Zip.compressFolder(new File(historyDir, s"$date.zip"), flakyReportDir)
  }

  def removeToOldFromHistory(maxToKeep: Int): Unit = {
    runFiles(historyDir)
      .take(Math.max(runFiles(historyDir).size - maxToKeep, 0))
      .foreach(_.delete())
  }

  def createHistoryReport(): HistoryReport = {

    val historicalRuns: List[HistoricalRun] = runFiles(historyDir)
      .map(History.loadHistory)
    val date = new SimpleDateFormat("HH:mm dd-MM-YYYY").format(new Date())
    HistoryReport(project, date, historicalRuns)
  }


  def processHistory(): HistoryReport = {
    historyDir.mkdirs()
    addCurrentToHistory()
    removeToOldFromHistory(20)
    createHistoryReport()
  }
}


case class HistoryReportDescription(timestamp: Long, gitCommitHash: Option[String])

object HistoryReportDescription {

  def load(in: InputStream): HistoryReportDescription = {
    val descriptorXml = XML.load(in)
    val timestamp = (descriptorXml \ "timestamp").text.trim.toLong
    val gitHash = (descriptorXml \ "gitCommitHash").text.trim
    HistoryReportDescription(timestamp, Some(gitHash))
  }

  def save(historyReportDescription: HistoryReportDescription, file: File): Unit = {
    val xml =
      <HistoryReportDescription>
        <timestamp>
          {historyReportDescription.timestamp}
        </timestamp>
        <gitCommitHash>
          {historyReportDescription.gitCommitHash.getOrElse("")}
        </gitCommitHash>
      </HistoryReportDescription>
    val prettyXml = new scala.xml.PrettyPrinter(80, 2).format(xml)
    Io.writeToFile(file, prettyXml)
  }
}

object History {
  val descriptorFile = "descriptor.xml"
  val dateFormat = "yyyyMMdd-HHmmss"

  def loadHistory: (File) => HistoricalRun = {
    file => {
      val manager = VFS.getManager
      val uri = file.toURI.toString.replace("file:/", "zip:/")
      val fo = manager.resolveFile(uri)
      val report: FlakyTestReport = Flaky.createReportFromHistory(fo)
      val descriptorFile = Option(fo.getChild(History.descriptorFile))
      val dateFromFileName = file.getName.replace(".zip","")
      val hrd = descriptorFile
        .filter(_.exists())
        .map(f => HistoryReportDescription.load(f.getContent.getInputStream))
        .getOrElse(HistoryReportDescription(new SimpleDateFormat(dateFormat).parse(dateFromFileName).getTime, None))
      HistoricalRun(hrd, report)
    }
  }
} 
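Driving the class above end to end could look like this sketch; the directories are placeholders, and a real run expects an existing flaky report directory plus a git checkout for the commit hash.

import java.io.File
import flaky.history.History

object HistoryExample extends App {
  val history = new History(
    project = "demo-project",
    historyDir = new File("target/flaky-history"),
    flakyReportDir = new File("target/flaky-test-reports"),
    projectDir = new File("."))
  // Archives the current report, prunes old zips (keeping 20) and builds the trend report.
  println(history.processHistory())
}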
Example 73
Source File: FieldSequentialValue.scala    From schedoscope   with Apache License 2.0 5 votes vote down vote up
package org.schedoscope.test

import java.text.SimpleDateFormat
import java.util.Date

import org.schedoscope.dsl.{FieldLike, Structure}


object FieldSequentialValue {

  def get(f: FieldLike[_], i: Int, p: String): Any = {
    if (f.t == manifest[Int])
      i
    else if (f.t == manifest[Long])
      i.toLong
    else if (f.t == manifest[Byte])
      i.toByte
    else if (f.t == manifest[Boolean])
      i % 2 == 0
    else if (f.t == manifest[Double])
      i.toDouble
    else if (f.t == manifest[Float])
      i.toFloat
    else if (f.t == manifest[Date])
      new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").format(new Date(i * 1000L))
    else if (f.t == manifest[String])
      f.n + "-" + p.format(i)
    else if (classOf[Structure].isAssignableFrom(f.t.runtimeClass)) {
      f.t.runtimeClass.newInstance().asInstanceOf[Structure].fields.map(sf => (sf.n, get(sf, i, p))).toMap
    } else if (f.t.runtimeClass == classOf[List[_]]) {
      List()
    } else if (f.t.runtimeClass == classOf[Map[_, _]])
      Map()
    else
      throw new RuntimeException("Cannot generate random values for: " + f.n + ", type is: " + f.t)
  }
} 
Example 74
Source File: Globals.scala    From schedoscope   with Apache License 2.0 5 votes vote down vote up
package schedoscope.example.osm

import java.text.SimpleDateFormat
import java.util.Date

import org.schedoscope.Settings
import org.schedoscope.dsl.View
import org.schedoscope.dsl.views.MonthlyParameterization

object Globals {
  def defaultHiveQlParameters(v: View) = {
    val baseParameters = Map(
      "env" -> v.env,
      "workflow_time" -> new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").format(new Date),
      "workflow_name" -> v.getClass().getName())

    if (v.isInstanceOf[MonthlyParameterization])
      baseParameters ++ Map(
        "year" -> v.asInstanceOf[MonthlyParameterization].year.v.get,
        "month" -> v.asInstanceOf[MonthlyParameterization].month.v.get)
    else baseParameters
  }

  def defaultPigProperties(v: View) = Map(
    "exec.type" -> "MAPREDUCE",
    "mapred.job.tracker" -> Settings().jobTrackerOrResourceManager,
    "fs.default.name" -> Settings().nameNode,
    "workflow_time" -> new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").format(new Date),
    "workflow_name" -> v.getClass().getName())
} 
Example 75
Source File: RetailRecommandSerialization.scala    From retail_analytics   with Apache License 2.0 5 votes vote down vote up
package models.json

import scalaz._
import scalaz.NonEmptyList._
import scalaz.Validation
import scalaz.Validation._
import Scalaz._
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import java.util.Date
import java.nio.charset.Charset
import controllers.SerializationBase
import models.{ RetailRecommand }




object RetailRecommandSerialization extends SerializationBase[RetailRecommand] {
  protected val ProductIdKey = "productId"
  protected val NoofOrdersKey = "nooforders" 
  

  override implicit val writer = new JSONW[RetailRecommand] {

    override def write(h: RetailRecommand): JValue = {
      JObject(
        JField(ProductIdKey, toJSON(h.productId)) ::
          JField(NoofOrdersKey, toJSON(h.nooforders)) ::          
          Nil)
    }
  }

  override implicit val reader = new JSONR[RetailRecommand] {

    override def read(json: JValue): Result[RetailRecommand] = {
      val productIdField = field[String](ProductIdKey)(json)
      val noofordersField = field[String](NoofOrdersKey)(json)      

      (productIdField |@| noofordersField ) {
        (productId: String, nooforders: String) =>
          new RetailRecommand(productId, nooforders)
      }
    }
  }
} 
Example 76
Source File: bson.scala    From picopickle   with MIT License 5 votes vote down vote up
package io.github.netvl.picopickle.backends.mongodb

import java.util.Date

import _root_.io.github.netvl.picopickle.{TypesComponent, DefaultPickler, ExceptionsComponent, BackendComponent}
import org.bson._
import org.bson.types.ObjectId

import scala.reflect.{ClassTag, classTag}

trait MongodbBsonBackendComponent extends BackendComponent {
  override val backend = MongodbBsonBackend
}

trait MongodbBsonSerializersComponent {
  this: MongodbBsonBackendComponent with TypesComponent =>

  private def identityBsonReadWriter[T <: backend.BValue : ClassTag] =
    ReadWriter.writing[T](identity).reading { case value: T => value }
      .orThrowing(whenReading = classTag[T].runtimeClass.getSimpleName, expected = classTag[T].runtimeClass.getSimpleName)

  implicit val bsonValueReadWriter: ReadWriter[BsonValue] =
    ReadWriter.writing[BsonValue](identity).reading(PartialFunction(identity))

  implicit val bsonDocumentReadWriter: ReadWriter[BsonDocument] = identityBsonReadWriter[BsonDocument]
  implicit val bsonArrayReadWriter: ReadWriter[BsonArray] = identityBsonReadWriter[BsonArray]
  implicit val bsonStringReadWriter: ReadWriter[BsonString] = identityBsonReadWriter[BsonString]
  implicit val bsonNumberReadWriter: ReadWriter[BsonNumber] = identityBsonReadWriter[BsonNumber]
  implicit val bsonBooleanReadWriter: ReadWriter[BsonBoolean] = identityBsonReadWriter[BsonBoolean]
  implicit val bsonNullReadWriter: ReadWriter[BsonNull] = identityBsonReadWriter[BsonNull]
  implicit val bsonObjectIdReadWriter: ReadWriter[BsonObjectId] = identityBsonReadWriter[BsonObjectId]
  implicit val bsonInt32ReadWriter: ReadWriter[BsonInt32] = identityBsonReadWriter[BsonInt32]
  implicit val bsonInt64ReadWriter: ReadWriter[BsonInt64] = identityBsonReadWriter[BsonInt64]
  implicit val bsonDoubleReadWriter: ReadWriter[BsonDouble] = identityBsonReadWriter[BsonDouble]
  implicit val bsonDateTimeReadWriter: ReadWriter[BsonDateTime] = identityBsonReadWriter[BsonDateTime]
  implicit val bsonBinaryReadWriter: ReadWriter[BsonBinary] = identityBsonReadWriter[BsonBinary]
  implicit val bsonSymbolReadWriter: ReadWriter[BsonSymbol] = identityBsonReadWriter[BsonSymbol]

  // TODO: add a test for this
  implicit val dateReadWriter: ReadWriter[Date] = ReadWriter.writing[Date](d => backend.makeDateTime(d.getTime))
    .reading {
      case backend.BsonExtract.DateTime(ts) => new Date(ts)
    }.orThrowing(whenReading = "date", expected = "datetime")

  implicit val symbolReadWriter: ReadWriter[Symbol] = ReadWriter.writing(backend.makeSymbol)
    .reading {
      case backend.BsonExtract.Symbol(sym) => sym
    }.orThrowing(whenReading = "symbol", expected = "symbol")

  implicit val binaryReadWriter: ReadWriter[Array[Byte]] = ReadWriter.writing(backend.makeBinary)
    .reading {
      case backend.BsonExtract.Binary(arr) => arr
    }.orThrowing(whenReading = "array of bytes", expected = "binary")

  implicit val intReadWriter: ReadWriter[Int] = ReadWriter.writing(backend.makeInt32)
    .reading {
      case backend.BsonExtract.Int32(n) => n
    }.orThrowing(whenReading = "int", expected = "32-bit integer")

  implicit val longReadWriter: ReadWriter[Long] = ReadWriter.writing(backend.makeInt64)
    .reading {
      case backend.BsonExtract.Int64(n) => n
    }.orThrowing(whenReading = "long", expected = "64-bit integer")

  implicit val doubleReadWriter: ReadWriter[Double] = ReadWriter.writing(backend.makeDouble)
    .reading {
      case backend.BsonExtract.Double(n) => n
    }.orThrowing(whenReading = "double", expected = "double")

  implicit val objectIdReadWriter: ReadWriter[ObjectId] = ReadWriter.writing(backend.makeObjectId)
    .reading {
      case backend.BsonExtract.ObjectId(oid) => oid
    }.orThrowing(whenReading = "object id", expected = "object id")
}

trait MongodbBsonPickler
  extends DefaultPickler
  with MongodbBsonBackendComponent
  with MongodbBsonSerializersComponent

object MongodbBsonPickler extends MongodbBsonPickler 
Example 77
Source File: S3Manager.scala    From project-matt   with MIT License 5 votes vote down vote up
package org.datafy.aws.app.matt.extras

import java.util.Date

import com.amazonaws.services.s3.AmazonS3ClientBuilder
import com.amazonaws.services.s3.model.ListObjectsV2Request

import scala.collection.JavaConverters._

case class S3KeySummary(
 bucketName: String, key: String, size: Int,
 lastModifiedDate: Option[Date] = None,
 continuationToken: Option[String] = None
)

object S3Manager {

  private val AWS_S3_CLIENT = AmazonS3ClientBuilder.defaultClient()

  val S3_MAX_SCAN_SIZE: Long = 3145728L * 1024L * 1024L // 3 TiB; declared as Long to avoid silent Int overflow
  val S3_MAX_RESULTS: Int = 1000

  def getMyBucketsSummary() = {
    val allBuckets = AWS_S3_CLIENT.listBuckets()
    allBuckets.asScala.toList.map(_.getName)
  }

  def getBucketObjects(bucketName: String) = {
    val bucketObjects = AWS_S3_CLIENT.listObjectsV2(bucketName)
    val objectSummaries = bucketObjects.getObjectSummaries
    objectSummaries.asScala.toList.map{
      s3Object =>  S3KeySummary(s3Object.getBucketName, s3Object.getKey,
        s3Object.getSize.toInt, Some(s3Object.getLastModified)
      )
    }
  }

  def getBucketObjects(bucketName: String, keyPrefix: String,
                               lastScannedObject: Option[String] = None) = {
    val objectsV2Request = new ListObjectsV2Request()
                                .withBucketName(bucketName)
                                .withPrefix(keyPrefix)
                                .withMaxKeys(S3_MAX_RESULTS)
                                .withStartAfter(lastScannedObject.getOrElse(""))

    val objectSummaries = AWS_S3_CLIENT.listObjectsV2(objectsV2Request).getObjectSummaries
    objectSummaries.asScala.toList
      .filter( _.getKey != keyPrefix )
      .map {
        s3Object => S3KeySummary(s3Object.getBucketName, s3Object.getKey,
          s3Object.getSize.toInt, Some(s3Object.getLastModified)
      )
    }
  }

  def getObjectContentAsStream(bucketName: String, objectKey: String) ={
    val contentStream = AWS_S3_CLIENT.getObject(bucketName, objectKey)
    contentStream.getObjectContent
  }

  def computeTotalObjectSize(s3KeySummary: List[S3KeySummary]) = {
    val bucketSummaryTuple = s3KeySummary.map {
      s3Object => (s3Object.bucketName, s3Object.size)
    }.groupBy(_._1).mapValues(_.map(_._2).sum).toList
    bucketSummaryTuple
  }

  def computeTotalScanCost(): Unit = {

  }

} 
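A quick sketch of calling the helpers above; it assumes AWS credentials are available via the default provider chain, and the bucket name and prefix are placeholders.

import org.datafy.aws.app.matt.extras.S3Manager

object S3ManagerExample extends App {
  val summaries = S3Manager.getBucketObjects("my-example-bucket", "logs/2017/")
  // Sums object sizes per bucket from the listing above.
  S3Manager.computeTotalObjectSize(summaries).foreach {
    case (bucket, totalBytes) => println(s"$bucket -> $totalBytes bytes")
  }
}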
Example 78
Source File: MockDataHandler.scala    From akka-http-oauth2-provider   with MIT License 5 votes vote down vote up
package scalaoauth2.provider

import java.util.Date

import scala.concurrent.Future

class MockDataHandler extends DataHandler[User] {

  override def validateClient(
      maybeClientCredential: Option[ClientCredential],
      request: AuthorizationRequest
  ): Future[Boolean] = Future.successful(false)

  override def findUser(
      maybeClientCredential: Option[ClientCredential],
      request: AuthorizationRequest
  ): Future[Option[User]] = Future.successful(None)

  override def createAccessToken(
      authInfo: AuthInfo[User]
  ): Future[AccessToken] =
    Future.successful(AccessToken("", Some(""), Some(""), Some(0L), new Date()))

  override def findAuthInfoByCode(
      code: String
  ): Future[Option[AuthInfo[User]]] = Future.successful(None)

  override def findAuthInfoByRefreshToken(
      refreshToken: String
  ): Future[Option[AuthInfo[User]]] = Future.successful(None)

  override def findAccessToken(token: String): Future[Option[AccessToken]] =
    Future.successful(None)

  override def findAuthInfoByAccessToken(
      accessToken: AccessToken
  ): Future[Option[AuthInfo[User]]] = Future.successful(None)

  override def getStoredAccessToken(
      authInfo: AuthInfo[User]
  ): Future[Option[AccessToken]] = Future.successful(None)

  override def refreshAccessToken(
      authInfo: AuthInfo[User],
      refreshToken: String
  ): Future[AccessToken] =
    Future.successful(AccessToken("", Some(""), Some(""), Some(0L), new Date()))

  override def deleteAuthCode(code: String): Future[Unit] =
    Future.successful(())
}

trait User {
  def id: Long
  def name: String
}

case class MockUser(id: Long, name: String) extends User 
Example 79
Source File: GeneratorThread.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.benchmark.generator.threads

import java.util.{Date, UUID}

import akka.event.slf4j.SLF4JLogging
import com.stratio.benchmark.generator.runners.StoppedThreads
import com.stratio.kafka.benchmark.generator.kafka.KafkaProducer
import com.stratio.models.benchmark.generator.models.{RawModel, RawModelCommonData}
import kafka.producer.Producer
import org.json4s.native.Serialization._
import org.json4s.{DefaultFormats, Formats}

class GeneratorThread(producer: Producer[String,String], timeout: Long, stoppedThreads: StoppedThreads, topic: String)
  extends Runnable with SLF4JLogging with RawModelCommonData {

  implicit val formats: Formats = DefaultFormats

  var numberOfEvents = 0

  override def run: Unit = {
    generateRaw(new Date().getTime)
    producer.close()

    stoppedThreads.incrementNumberOfEvents(numberOfEvents)
    stoppedThreads.incrementNumberOfThreads
  }

  private def generateRaw(startTimeInMillis: Long): Unit = {
    while(((startTimeInMillis + timeout) - new Date().getTime) > 0) {
      val id = UUID.randomUUID.toString
      val timestamp = RawModel.generateTimestamp
      val clientId = RawModel.generateRandomInt(RawModel.Range_client_id._1, RawModel.Range_client_id._2)
      val latitude = clientIdGeo.get(clientId).get._1
      val longitude = clientIdGeo.get(clientId).get._2
      val paymentMethod = RawModel.generatePaymentMethod()
      val creditCard = clientIdCreditCard.get(clientId).get
      val shoppingCenter = RawModel.generateShoppingCenter()
      val employee = RawModel.generateRandomInt(RawModel.Range_employee._1, RawModel.Range_employee._2)

      val rawModel = new RawModel(
        id,
        timestamp,
        clientId,
        latitude,
        longitude,
        paymentMethod,
        creditCard,
        shoppingCenter,
        employee)

      KafkaProducer.send(producer, topic, write(rawModel))
      numberOfEvents = numberOfEvents + 1
    }
  }
} 
Example 80
Source File: PolicyErrorModel.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.serving.core.models.policy

import java.util.Date

object PhaseEnum extends Enumeration {
  val Input = Value("Input")
  val InputStream = Value("InputStream")
  val Parser = Value("Parser")
  val Operator = Value("Operator")
  val Cube = Value("Cube")
  val CubeStream = Value("CubeStream")
  val Output = Value("Output")
  val Trigger = Value("Trigger")
  val TriggerStream = Value("TriggerStream")
  val Execution = Value("Execution")
  val RawData = Value("RawData")
}

case class PolicyErrorModel(
                             message: String,
                             phase: PhaseEnum.Value,
                             originalMsg: String,
                             date: Date = new Date
                           ) 
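Constructing an error record is a one-liner since the date defaults to the current time; the messages in this sketch are invented for illustration.

import com.stratio.sparta.serving.core.models.policy.{PhaseEnum, PolicyErrorModel}

object PolicyErrorExample extends App {
  val error = PolicyErrorModel(
    message = "Parsing failed for incoming event",
    phase = PhaseEnum.Parser,
    originalMsg = "NumberFormatException: For input string: \"n/a\"")
  println(s"[${error.phase}] ${error.message} (${error.date})")
}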
Example 81
Source File: FileOutput.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta

import java.io.{Serializable => JSerializable}
import java.util.Date

import com.stratio.sparta.sdk.properties.ValidatingPropertyMap._
import com.stratio.sparta.sdk.pipeline.output.{Output, OutputFormatEnum, SaveModeEnum}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions._


class FileOutput(name: String, properties: Map[String, JSerializable]) extends Output(name, properties) {

  val path = propertiesWithCustom.get("path").getOrElse(throw new IllegalArgumentException("Property " +
    "path is mandatory"))
  val createDifferentFiles = propertiesWithCustom.get("createDifferentFiles").getOrElse("true")

  override def save(dataFrame: DataFrame, saveMode: SaveModeEnum.Value, options: Map[String, String]): Unit = {
    val finalPath = if (createDifferentFiles.asInstanceOf[String].toBoolean){
      path.toString + new Date().getTime
    } else {
      path.toString
    }
    dataFrame.write.json(finalPath)
  }
} 
Example 82
Source File: DateTimeField.scala    From sparta   with Apache License 2.0 5 votes vote down vote up
package com.stratio.sparta.plugin.cube.field.datetime

import java.io.{Serializable => JSerializable}
import java.util.Date

import akka.event.slf4j.SLF4JLogging
import com.stratio.sparta.plugin.cube.field.datetime.DateTimeField._
import com.stratio.sparta.sdk.pipeline.aggregation.cube.{DimensionType, Precision}
import com.stratio.sparta.sdk.pipeline.schema.TypeOp
import com.stratio.sparta.sdk.pipeline.schema.TypeOp._
import com.stratio.sparta.sdk.properties.ValidatingPropertyMap._
import com.stratio.sparta.sdk.utils.AggregationTime
import org.joda.time.DateTime

case class DateTimeField(props: Map[String, JSerializable], override val defaultTypeOperation: TypeOp)
  extends DimensionType with JSerializable with SLF4JLogging {

  def this(defaultTypeOperation: TypeOp) {
    this(Map.empty[String, JSerializable], defaultTypeOperation)
  }

  def this(props: Map[String, JSerializable]) {
    this(props, TypeOp.Timestamp)
  }

  def this() {
    this(Map.empty[String, JSerializable], TypeOp.Timestamp)
  }

  override val operationProps: Map[String, JSerializable] = props

  override val properties: Map[String, JSerializable] = props ++ {
    if (props.getString(AggregationTime.GranularityPropertyName, None).isEmpty)
      Map(AggregationTime.GranularityPropertyName -> AggregationTime.DefaultGranularity)
    else Map.empty[String, JSerializable]
  }

  override def precision(keyName: String): Precision = {
    if (AggregationTime.precisionsMatches(keyName).nonEmpty) getPrecision(keyName, getTypeOperation(keyName))
    else TimestampPrecision
  }

  @throws(classOf[ClassCastException])
  override def precisionValue(keyName: String, value: Any): (Precision, Any) =
    try {
      val precisionKey = precision(keyName)
      (precisionKey,
        getPrecision(
          TypeOp.transformValueByTypeOp(TypeOp.DateTime, value).asInstanceOf[DateTime],
          precisionKey,
          properties
        ))
    }
    catch {
      case cce: ClassCastException =>
        log.error("Error parsing " + value + ".")
        throw cce
    }

  private def getPrecision(value: DateTime, precision: Precision, properties: Map[String, JSerializable]): Any = {
    TypeOp.transformValueByTypeOp(precision.typeOp,
      AggregationTime.truncateDate(value, precision match {
        case t if t == TimestampPrecision =>
          properties.get(AggregationTime.GranularityPropertyName)
            .map(_.toString)
            .getOrElse(AggregationTime.DefaultGranularity)
        case _ => precision.id
      })).asInstanceOf[Any]
  }
}

object DateTimeField {

  final val TimestampPrecision = DimensionType.getTimestamp(Some(TypeOp.Timestamp), TypeOp.Timestamp)
} 
Example 83
Source File: LastValueOperatorTest.scala    From sparta   with Apache License 2.0
package com.stratio.sparta.plugin.cube.operator.lastValue

import java.util.Date

import com.stratio.sparta.sdk.pipeline.aggregation.operator.Operator
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpec}

@RunWith(classOf[JUnitRunner])
class LastValueOperatorTest extends WordSpec with Matchers {

  "LastValue operator" should {

    val initSchema = StructType(Seq(
      StructField("field1", IntegerType, false),
      StructField("field2", IntegerType, false),
      StructField("field3", IntegerType, false)
    ))

    val initSchemaFail = StructType(Seq(
      StructField("field2", IntegerType, false)
    ))

    "processMap must return the expected values" in {
      val inputField = new LastValueOperator("lastValue", initSchema, Map())
      inputField.processMap(Row(1, 2)) should be(None)

      val inputFields2 = new LastValueOperator("lastValue", initSchemaFail, Map("inputField" -> "field1"))
      inputFields2.processMap(Row(1, 2)) should be(None)

      val inputFields3 = new LastValueOperator("lastValue", initSchema, Map("inputField" -> "field1"))
      inputFields3.processMap(Row(1, 2)) should be(Some(1))

      val inputFields4 = new LastValueOperator("lastValue", initSchema,
        Map("inputField" -> "field1", "filters" -> "[{\"field\":\"field1\", \"type\": \"<\", \"value\":2}]"))
      inputFields4.processMap(Row(1, 2)) should be(Some(1L))

      val inputFields5 = new LastValueOperator("lastValue", initSchema,
        Map("inputField" -> "field1", "filters" -> "[{\"field\":\"field1\", \"type\": \">\", \"value\":\"2\"}]"))
      inputFields5.processMap(Row(1, 2)) should be(None)

      val inputFields6 = new LastValueOperator("lastValue", initSchema,
        Map("inputField" -> "field1", "filters" -> {
          "[{\"field\":\"field1\", \"type\": \"<\", \"value\":\"2\"}," +
            "{\"field\":\"field2\", \"type\": \"<\", \"value\":\"2\"}]"
        }))
      inputFields6.processMap(Row(1, 2)) should be(None)
    }

    "processReduce must return the expected values" in {
      val inputFields = new LastValueOperator("lastValue", initSchema, Map())
      inputFields.processReduce(Seq()) should be(None)

      val inputFields2 = new LastValueOperator("lastValue", initSchema, Map())
      inputFields2.processReduce(Seq(Some(1), Some(2))) should be(Some(2))

      val inputFields3 = new LastValueOperator("lastValue", initSchema, Map())
      inputFields3.processReduce(Seq(Some("a"), Some("b"))) should be(Some("b"))
    }

    "associative process must return the expected values" in {
      val inputFields = new LastValueOperator("lastValue", initSchema, Map())
      val resultInput = Seq((Operator.OldValuesKey, Some(1L)),
        (Operator.NewValuesKey, Some(1L)),
        (Operator.NewValuesKey, None))
      inputFields.associativity(resultInput) should be(Some(1L))

      val inputFields2 = new LastValueOperator("lastValue", initSchema, Map("typeOp" -> "int"))
      val resultInput2 = Seq((Operator.OldValuesKey, Some(1L)),
        (Operator.NewValuesKey, Some(1L)))
      inputFields2.associativity(resultInput2) should be(Some(1))

      val inputFields3 = new LastValueOperator("lastValue", initSchema, Map("typeOp" -> null))
      val resultInput3 = Seq((Operator.OldValuesKey, Some(1)),
        (Operator.NewValuesKey, Some(2)))
      inputFields3.associativity(resultInput3) should be(Some(2))

      val inputFields4 = new LastValueOperator("lastValue", initSchema, Map())
      val resultInput4 = Seq()
      inputFields4.associativity(resultInput4) should be(None)

      val inputFields5 = new LastValueOperator("lastValue", initSchema, Map())
      val date = new Date()
      val resultInput5 = Seq((Operator.NewValuesKey, Some(date)))
      inputFields5.associativity(resultInput5) should be(Some(date))
    }
  }
} 
Example 84
Source File: FirstValueOperatorTest.scala    From sparta   with Apache License 2.0
package com.stratio.sparta.plugin.cube.operator.firstValue

import java.util.Date

import com.stratio.sparta.sdk.pipeline.aggregation.operator.Operator
import org.apache.spark.sql.Row
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpec}

@RunWith(classOf[JUnitRunner])
class FirstValueOperatorTest extends WordSpec with Matchers {

  "FirstValue operator" should {

    val initSchema = StructType(Seq(
      StructField("field1", IntegerType, false),
      StructField("field2", IntegerType, false),
      StructField("field3", IntegerType, false)
    ))

    val initSchemaFail = StructType(Seq(
      StructField("field2", IntegerType, false)
    ))

    "processMap must return the expected values" in {
      val inputField = new FirstValueOperator("firstValue", initSchema, Map())
      inputField.processMap(Row(1, 2)) should be(None)

      val inputFields2 = new FirstValueOperator("firstValue", initSchemaFail, Map("inputField" -> "field1"))
      inputFields2.processMap(Row(1, 2)) should be(None)

      val inputFields3 = new FirstValueOperator("firstValue", initSchema, Map("inputField" -> "field1"))
      inputFields3.processMap(Row(1, 2)) should be(Some(1))

      val inputFields4 = new FirstValueOperator("firstValue", initSchema,
        Map("inputField" -> "field1", "filters" -> "[{\"field\":\"field1\", \"type\": \"<\", \"value\":2}]"))
      inputFields4.processMap(Row(1, 2)) should be(Some(1L))

      val inputFields5 = new FirstValueOperator("firstValue", initSchema,
        Map("inputField" -> "field1", "filters" -> "[{\"field\":\"field1\", \"type\": \">\", \"value\":\"2\"}]"))
      inputFields5.processMap(Row(1, 2)) should be(None)

      val inputFields6 = new FirstValueOperator("firstValue", initSchema,
        Map("inputField" -> "field1", "filters" -> {
          "[{\"field\":\"field1\", \"type\": \"<\", \"value\":\"2\"}," +
            "{\"field\":\"field2\", \"type\": \"<\", \"value\":\"2\"}]"
        }))
      inputFields6.processMap(Row(1, 2)) should be(None)
    }

    "processReduce must return the expected values" in {
      val inputFields = new FirstValueOperator("firstValue", initSchema, Map())
      inputFields.processReduce(Seq()) should be(None)

      val inputFields2 = new FirstValueOperator("firstValue", initSchema, Map())
      inputFields2.processReduce(Seq(Some(1), Some(2))) should be(Some(1))

      val inputFields3 = new FirstValueOperator("firstValue", initSchema, Map())
      inputFields3.processReduce(Seq(Some("a"), Some("b"))) should be(Some("a"))
    }

    "associative process must return the expected values" in {
      val inputFields = new FirstValueOperator("firstValue", initSchema, Map())
      val resultInput = Seq((Operator.OldValuesKey, Some(1L)),
        (Operator.NewValuesKey, Some(1L)),
        (Operator.NewValuesKey, None))
      inputFields.associativity(resultInput) should be(Some(1L))

      val inputFields2 = new FirstValueOperator("firstValue", initSchema, Map("typeOp" -> "int"))
      val resultInput2 = Seq((Operator.OldValuesKey, Some(1L)),
        (Operator.NewValuesKey, Some(1L)))
      inputFields2.associativity(resultInput2) should be(Some(1))

      val inputFields3 = new FirstValueOperator("firstValue", initSchema, Map("typeOp" -> null))
      val resultInput3 = Seq((Operator.OldValuesKey, Some(1)),
        (Operator.NewValuesKey, Some(1)),
        (Operator.NewValuesKey, None))
      inputFields3.associativity(resultInput3) should be(Some(1))

      val inputFields4 = new FirstValueOperator("firstValue", initSchema, Map())
      val resultInput4 = Seq()
      inputFields4.associativity(resultInput4) should be(None)

      val inputFields5 = new FirstValueOperator("firstValue", initSchema, Map())
      val date = new Date()
      val resultInput5 = Seq((Operator.NewValuesKey, Some(date)))
      inputFields5.associativity(resultInput5) should be(Some(date))
    }
  }
} 
Example 85
Source File: DateTimeFieldTest.scala    From sparta   with Apache License 2.0
package com.stratio.sparta.plugin.cube.field.datetime

import java.io.{Serializable => JSerializable}
import java.util.Date

import com.stratio.sparta.sdk.pipeline.schema.TypeOp
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{Matchers, WordSpecLike}

@RunWith(classOf[JUnitRunner])
class DateTimeFieldTest extends WordSpecLike with Matchers {

  val dateTimeDimension = new DateTimeField(Map("second" -> "long", "minute" -> "date", "typeOp" -> "datetime"))

  "A DateTimeDimension" should {
    "In default implementation, get the expected precisions for a specific time" in {
      val newDate = new Date()
      val precision5s =
        dateTimeDimension.precisionValue("5s", newDate.asInstanceOf[JSerializable])
      val precision10s =
        dateTimeDimension.precisionValue("10s", newDate.asInstanceOf[JSerializable])
      val precision15s =
        dateTimeDimension.precisionValue("15s", newDate.asInstanceOf[JSerializable])
      val precisionSecond =
        dateTimeDimension.precisionValue("second", newDate.asInstanceOf[JSerializable])
      val precisionMinute =
        dateTimeDimension.precisionValue("minute", newDate.asInstanceOf[JSerializable])
      val precisionHour =
        dateTimeDimension.precisionValue("hour", newDate.asInstanceOf[JSerializable])
      val precisionDay =
        dateTimeDimension.precisionValue("day", newDate.asInstanceOf[JSerializable])
      val precisionMonth =
        dateTimeDimension.precisionValue("month", newDate.asInstanceOf[JSerializable])
      val precisionYear =
        dateTimeDimension.precisionValue("year", newDate.asInstanceOf[JSerializable])

      precision5s._1.id should be("5s")
      precision10s._1.id should be("10s")
      precision15s._1.id should be("15s")
      precisionSecond._1.id should be("second")
      precisionMinute._1.id should be("minute")
      precisionHour._1.id should be("hour")
      precisionDay._1.id should be("day")
      precisionMonth._1.id should be("month")
      precisionYear._1.id should be("year")
    }

    "Each precision dimension has its own output type: second must be long, minute must be date, the others datetime" in {
      dateTimeDimension.precision("5s").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("10s").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("15s").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("second").typeOp should be(TypeOp.Long)
      dateTimeDimension.precision("minute").typeOp should be(TypeOp.Date)
      dateTimeDimension.precision("day").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("month").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision("year").typeOp should be(TypeOp.DateTime)
      dateTimeDimension.precision(DateTimeField.TimestampPrecision.id).typeOp should be(TypeOp.Timestamp)
    }
  }
} 
Example 86
Source File: GamerDataGenerator.scala    From SparkOnKudu   with Apache License 2.0
package org.kududb.spark.demo.gamer.aggregates

import java.util.{Date, Random}

import org.kududb.spark.demo.gamer.GamerEvent

object GamerDataGenerator {

  val random = new Random()
  val averagePlayerPercentage = 40
  val advancedPlayerPercentage = 80
  val superStarPlayerPercentage = 100
  var date = System.currentTimeMillis()

  def makeNewGamerRecord(numOfGamers:Int): GamerEvent = {
    println("date: " + new Date(date))
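    // Advance the simulated clock by six hours for the next generated record.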
    date += 60000 * 60 * 6
    val playerSelection = random.nextInt(100)
    if (playerSelection < averagePlayerPercentage) {

      val gamerId = random.nextInt(numOfGamers/100) * 100 + playerSelection

      new GamerEvent(gamerId.toString,
        date,
        1,
        if (random.nextInt(10) > 7) 1 else 0,
        random.nextInt(10),
        random.nextInt(20),
        random.nextInt(1000),
        random.nextInt(2000))
    } else if (playerSelection < advancedPlayerPercentage) {
      val gamerId = random.nextInt(numOfGamers/100) * 100 + playerSelection

      new GamerEvent(gamerId.toString,
        date,
        1,
        if (random.nextInt(10) > 5) 1 else 0,
        random.nextInt(20),
        random.nextInt(18),
        random.nextInt(2000),
        random.nextInt(2000))
    } else {
      val gamerId = random.nextInt(numOfGamers/100) * 100 + playerSelection

      new GamerEvent(gamerId.toString,
        date,
        1,
        if (random.nextInt(10) > 3) 1 else 0,
        random.nextInt(20),
        random.nextInt(10),
        random.nextInt(4000),
        random.nextInt(1500))
    }
  }
} 
Example 87
Source File: StateEndpoint.scala    From 006877   with MIT License
package aia.channels

import akka.actor.Actor
import java.util.Date

case class StateEvent(time: Date, state: String)
case class Connection(time: Date, connected: Boolean)

class StateEndpoint extends Actor {
  def receive = {
    case Connection(time, true) => {
      context.system.eventStream.publish(new StateEvent(time, "Connected"))
    }
    case Connection(time, false) => {
      context.system.eventStream.publish(new StateEvent(time, "Disconnected"))
    }
  }
}

class SystemLog extends Actor {
  def receive = {
    case event: StateEvent => {
    }
  }
}

class SystemMonitor extends Actor {
  def receive = {
    case event: StateEvent => {
    }
  }
}


import akka.event.ActorEventBus
import akka.event.{ LookupClassification, EventBus }

class OrderMessageBus extends EventBus
  with LookupClassification
  with ActorEventBus {

  type Event = Order
  type Classifier = Boolean
  def mapSize = 2

  protected def classify(event: OrderMessageBus#Event) = {
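    // Two-way classification: orders with number greater than 1 go to one bucket, the rest to the other.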
    event.number > 1
  }

  protected def publish(event: OrderMessageBus#Event,
                        subscriber: OrderMessageBus#Subscriber) {
    subscriber ! event
  }
}


class MyEventBus extends EventBus with LookupClassification
  with ActorEventBus {

  type Event = AnyRef
  def mapSize = 2
  type Classifier = String

  protected def classify(event: MyEventBus#Event) = {
    "TestBus"
  }

  protected def publish(event: MyEventBus#Event,
                        subscriber: MyEventBus#Subscriber) {
    subscriber ! event
  }

  def subscribe(subscriber: Subscriber): Boolean =
    subscribers.put("TestBus", subscriber)
} 
Example 88
Source File: DeadLetterTest.scala    From 006877   with MIT License
package aia.channels

import akka.testkit.{ ImplicitSender, TestProbe, TestKit }
import akka.actor.{ PoisonPill, Props, DeadLetter, ActorSystem }
import org.scalatest.{WordSpecLike, BeforeAndAfterAll, MustMatchers}
import java.util.Date

class DeadLetterTest extends TestKit(ActorSystem("DeadLetterTest"))
  with WordSpecLike with BeforeAndAfterAll with MustMatchers
  with ImplicitSender {

  override def afterAll()  {
    system.terminate()
  }

  "DeadLetter" must {
    "catch messages sent to deadLetters" in {
      val deadLetterMonitor = TestProbe()

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val msg = new StateEvent(new Date(), "Connected")
      system.deadLetters ! msg

      val dead = deadLetterMonitor.expectMsgType[DeadLetter]
      dead.message must be(msg)
      dead.sender must be(testActor)
      dead.recipient must be(system.deadLetters)
    }
    "catch deadLetter messages sent to deadLetters" in {

      val deadLetterMonitor = TestProbe()
      val actor = system.actorOf(Props[EchoActor], "echo")

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val msg = new Order("me", "Akka in Action", 1)
      val dead = DeadLetter(msg, testActor, actor)
      system.deadLetters ! dead

      deadLetterMonitor.expectMsg(dead)

      system.stop(actor)

    }

    "catch messages sent to a terminated Actor" in {

      val deadLetterMonitor = TestProbe()

      system.eventStream.subscribe(
        deadLetterMonitor.ref,
        classOf[DeadLetter])

      val actor = system.actorOf(Props[EchoActor], "echo")
      actor ! PoisonPill
      val msg = new Order("me", "Akka in Action", 1)
      actor ! msg

      val dead = deadLetterMonitor.expectMsgType[DeadLetter]
      dead.message must be(msg)
      dead.sender must be(testActor)
      dead.recipient must be(actor)

    }

  }
} 
Example 89
Source File: AggregatorTest.scala    From 006877   with MIT License
package aia.structure

import java.util.Date
import scala.concurrent.duration._

import akka.testkit._
import akka.actor._

import org.scalatest._
import scala.language.postfixOps

class AggregatorTest
  extends TestKit(ActorSystem("AggregatorTest"))
  with WordSpecLike with BeforeAndAfterAll {
  val timeout = 2 seconds

  protected override def afterAll(): Unit = {
    system.terminate()
  }

  "The Aggregator" must {
    "aggregate two messages" in {

      val endProbe = TestProbe()
      val actorRef = system.actorOf(
        Props(new Aggregator(timeout, endProbe.ref)))
      val photoStr = ImageProcessing.createPhotoString(new Date(), 60)
      val msg1 = PhotoMessage("id1",
        photoStr,
        Some(new Date()),
        None)
      actorRef ! msg1

      val msg2 = PhotoMessage("id1",
        photoStr,
        None,
        Some(60))
      actorRef ! msg2

      val combinedMsg = PhotoMessage("id1",
        photoStr,
        msg1.creationTime,
        msg2.speed)

      endProbe.expectMsg(combinedMsg)

    }
    "send message after timeout" in {

      val endProbe = TestProbe()
      val actorRef = system.actorOf(
        Props(new Aggregator(timeout, endProbe.ref)))
      val photoStr = ImageProcessing.createPhotoString(
        new Date(), 60)
      val msg1 = PhotoMessage("id1",
        photoStr,
        Some(new Date()),
        None)
      actorRef ! msg1

      endProbe.expectMsg(msg1)

    }
    "aggregate two messages when restarting" in {

      val endProbe = TestProbe()
      val actorRef = system.actorOf(
        Props(new Aggregator(timeout, endProbe.ref)))
      val photoStr = ImageProcessing.createPhotoString(new Date(), 60)

      val msg1 = PhotoMessage("id1",
        photoStr,
        Some(new Date()),
        None)
      actorRef ! msg1

      actorRef ! new IllegalStateException("restart")

      val msg2 = PhotoMessage("id1",
        photoStr,
        None,
        Some(60))
      actorRef ! msg2

      val combinedMsg = PhotoMessage("id1",
        photoStr,
        msg1.creationTime,
        msg2.speed)

      endProbe.expectMsg(combinedMsg)

    }
  }
} 
Example 90
Source File: ScatterGatherTest.scala    From 006877   with MIT License
package aia.structure

import java.util.Date
import scala.concurrent.duration._

import akka.actor._

import org.scalatest._
import akka.testkit._
import scala.language.postfixOps

class ScatterGatherTest
  extends TestKit(ActorSystem("ScatterGatherTest"))
  with WordSpecLike
  with BeforeAndAfterAll {

  val timeout = 2 seconds

  override def afterAll(): Unit = {
    system.terminate()
  }

  "The ScatterGather" must {
    "scatter the message and gather them again" in {

      val endProbe = TestProbe()
      val aggregateRef = system.actorOf(
        Props(new Aggregator(timeout, endProbe.ref)))
      val speedRef = system.actorOf(
        Props(new GetSpeed(aggregateRef)))
      val timeRef = system.actorOf(
        Props(new GetTime(aggregateRef)))
      val actorRef = system.actorOf(
        Props(new RecipientList(Seq(speedRef, timeRef))))

      val photoDate = new Date()
      val photoSpeed = 60
      val msg = PhotoMessage("id1",
        ImageProcessing.createPhotoString(photoDate, photoSpeed))

      actorRef ! msg

      val combinedMsg = PhotoMessage(msg.id,
        msg.photo,
        Some(photoDate),
        Some(photoSpeed))

      endProbe.expectMsg(combinedMsg)


    }
  }
} 
Example 91
Source File: ImageProcessing.scala    From 006877   with MIT License
package aia.routing

import java.text.SimpleDateFormat
import java.util.Date

case class Photo(license: String, speed: Int)

object ImageProcessing {
  val dateFormat = new SimpleDateFormat("ddMMyyyy HH:mm:ss.SSS")
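  // Photo strings are encoded as "<time>|<speed>|<license>", with the time rendered through dateFormat.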
  def getSpeed(image: String): Option[Int] = {
    val attributes = image.split('|')
    if (attributes.size == 3)
      Some(attributes(1).toInt)
    else
      None
  }
  def getTime(image: String): Option[Date] = {
    val attributes = image.split('|')
    if (attributes.size == 3)
      Some(dateFormat.parse(attributes(0)))
    else
      None
  }
  def getLicense(image: String): Option[String] = {
    val attributes = image.split('|')
    if (attributes.size == 3)
      Some(attributes(2))
    else
      None
  }
  def createPhotoString(date: Date, speed: Int): String = {
    createPhotoString(date, speed, " ")
  }

  def createPhotoString(date: Date,
                        speed: Int,
                        license: String): String = {
    "%s|%s|%s".format(dateFormat.format(date), speed, license)
  }
} 
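A short usage sketch (not part of the original source) showing the round trip through these helpers; it assumes the demo object is compiled alongside ImageProcessing in package aia.routing, and the license value is just example data:

package aia.routing

import java.util.Date

object ImageProcessingDemo extends App {
  // Encode a photo string, then recover its parts with the extractor helpers.
  val photo = ImageProcessing.createPhotoString(new Date(), 60, "AB-123")
  println(ImageProcessing.getTime(photo))    // Some(<parsed date>)
  println(ImageProcessing.getSpeed(photo))   // Some(60)
  println(ImageProcessing.getLicense(photo)) // Some(AB-123)
}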
Example 92
Source File: SessionHeartbeat.scala    From incubator-livy   with Apache License 2.0
package org.apache.livy.server.interactive

import java.util.Date

import scala.concurrent.duration.{Deadline, Duration, FiniteDuration}

import org.apache.livy.sessions.Session.RecoveryMetadata
import org.apache.livy.LivyConf
import org.apache.livy.server.SessionServlet
import org.apache.livy.sessions.{Session, SessionManager}


trait SessionHeartbeatWatchdog[S <: Session with SessionHeartbeat, R <: RecoveryMetadata] {
  self: SessionManager[S, R] =>

  private val watchdogThread = new Thread(s"HeartbeatWatchdog-${self.getClass.getName}") {
    override def run(): Unit = {
      val interval = livyConf.getTimeAsMs(LivyConf.HEARTBEAT_WATCHDOG_INTERVAL)
      info("Heartbeat watchdog thread started.")
      while (true) {
        deleteExpiredSessions()
        Thread.sleep(interval)
      }
    }
  }

  protected def start(): Unit = {
    assert(!watchdogThread.isAlive())

    watchdogThread.setDaemon(true)
    watchdogThread.start()
  }

  private[interactive] def deleteExpiredSessions(): Unit = {
    // Delete takes time. If we used .filter().foreach() here, the gap between the time we check
    // expiration and the time we delete the session could be large. To avoid that, check expiration
    // inside the foreach block.
    sessions.values.foreach { s =>
      if (s.heartbeatExpired) {
        info(s"Session ${s.id} expired. Last heartbeat is at ${s.lastHeartbeat}.")
        try { delete(s) } catch {
          case t: Throwable =>
            warn(s"Exception was thrown when deleting expired session ${s.id}", t)
        }
      }
    }
  }
} 
Example 93
Source File: SharedSparkContext.scala    From spark-testing-base   with Apache License 2.0
package com.holdenkarau.spark.testing

import java.util.Date

import org.apache.spark._
import org.scalatest.{BeforeAndAfterAll, Suite}


trait SharedSparkContext extends BeforeAndAfterAll with SparkContextProvider {
  self: Suite =>

  @transient private var _sc: SparkContext = _

  override def sc: SparkContext = _sc

  override def beforeAll() {
    _sc = new SparkContext(conf)
    setup(_sc)
    super.beforeAll()
  }

  override def afterAll() {
    try {
      LocalSparkContext.stop(_sc)
      _sc = null
    } finally {
      super.afterAll()
    }
  }
} 
Example 94
Source File: SharedSparkContext.scala    From spark-testing-base   with Apache License 2.0
package com.holdenkarau.spark.testing

import java.util.Date

import org.apache.spark._
import org.scalatest.{BeforeAndAfterAll, Suite}


trait SharedSparkContext extends BeforeAndAfterAll with SparkContextProvider {
  self: Suite =>

  @transient private var _sc: SparkContext = _

  override def sc: SparkContext = _sc

  protected implicit def reuseContextIfPossible: Boolean = false

  override def beforeAll() {
    // This is kind of a hack, but if we've got an existing Spark Context
    // hanging around we need to kill it.
    if (!reuseContextIfPossible) {
      EvilSparkContext.stopActiveSparkContext()
    }
    _sc = SparkContext.getOrCreate(conf)
    setup(_sc)
    super.beforeAll()
  }

  override def afterAll() {
    try {
      if (!reuseContextIfPossible) {
        LocalSparkContext.stop(_sc)
        _sc = null
      }
    } finally {
      super.afterAll()
    }
  }
} 
Example 95
Source File: TrafficLightFSM.scala    From Akka-Cookbook   with MIT License
package com.packt.chapter10

import java.util.Date

import akka.actor.{Actor, ActorLogging, ActorRef, FSM}
import TrafficLightFSM._
import scala.concurrent.duration._

object TrafficLightFSM {
  sealed trait TrafficLightState
  case object Green extends TrafficLightState
  case object Yellow extends TrafficLightState
  case object Red extends TrafficLightState

  sealed trait Data
  case class Countdown(i: Int) extends Data

  //Events
  case object Tick
  case class ReportChange(to: TrafficLightState, date: Date)
}

class TrafficLightFSM(changesSubscriber: ActorRef) extends Actor with ActorLogging with FSM[TrafficLightState, Data]{
  import context.dispatcher

  trafficLightState(Green, Yellow, 2)
  trafficLightState(Yellow, Red, 4)
  trafficLightState(Red, Green, 8)
  startWith(Green, Countdown(8))

  initialize()
  scheduleTick()

  onTransition {
    case Green -> Yellow => changesSubscriber ! ReportChange(Yellow, new Date())
    case Yellow -> Red => changesSubscriber ! ReportChange(Red, new Date())
    case Red -> Green => changesSubscriber ! ReportChange(Green, new Date())
  }

  private def scheduleTick() = {
    context.system.scheduler.scheduleOnce(1 second, self, Tick)
  }

  private def trafficLightState(
                                 trafficLightState: TrafficLightState,
                                 nextTrafficLightState: TrafficLightState,
                                 totalSecondsNextState: Int) = {
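    // Count down once per second; when the counter reaches zero, move to nextTrafficLightState with a fresh countdown.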
    when(trafficLightState) {
      case Event(Tick, Countdown(i)) if i != 0 =>
        scheduleTick()
        log.info(s"Current state [$trafficLightState]. Countdown: [$i].")
        stay using Countdown(i - 1)
      case Event(Tick, Countdown(i)) if i == 0 =>
        scheduleTick()
        log.info(s"Changing from $trafficLightState to $nextTrafficLightState.")
        goto(nextTrafficLightState) using Countdown(totalSecondsNextState)
    }
  }
} 
Example 96
Source File: ClientEntity.scala    From Akka-Cookbook   with MIT License
package com.packt.chapter11.trip.impl

import java.util.Date
import akka.Done
import com.lightbend.lagom.scaladsl.persistence.PersistentEntity

class ClientEntity extends PersistentEntity {
  override type Command = ClientCommand[_]
  override type Event = ClientEvent
  override type State = ClientState

  override def initialState = ClientState(false, Nil)
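  // ClientState tracks whether a trip is in progress and the locations recorded so far.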

  override def behavior: Behavior =
    Actions()
      .onCommand[StartTrip.type, Done] {
      case (_, ctx, state) if !state.tripInProgress =>
        ctx.thenPersist(TripStarted(new Date().getTime)) { _ => ctx.reply(Done) }
      case (_, ctx, _) =>
        ctx.invalidCommand("The trip has started already.")
        ctx.done
    }
      .onCommand[EndTrip.type, Done] {
      case (_, ctx, state) if state.tripInProgress =>
        ctx.thenPersist(TripEnded(new Date().getTime)) { _ => ctx.reply(Done) }
      case (_, ctx, _)  =>
        ctx.invalidCommand("The trip has not started.")
        ctx.done
    }
      .onCommand[AddLocation, Done] {
      case (AddLocation(req), ctx, state) if state.tripInProgress =>
        ctx.thenPersist(LocationAdded(Location(req.latitude, req.longitude))) { _ => ctx.reply(Done) }
      case (_, ctx, _) =>
        ctx.invalidCommand("The trip has not started.")
        ctx.done
    }
      .onEvent {
        case (TripStarted(_), _) => ClientState(true, Nil)
        case (TripEnded(_), _) => ClientState(false, Nil)
        case (LocationAdded(loc), state) => state.copy(locations = state.locations :+ loc)
      }
} 
Example 97
Source File: BarLoader.scala    From lagom   with Apache License 2.0
package impl

import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.util.Date

import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator
import com.lightbend.lagom.scaladsl.server._
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import play.api.libs.ws.ahc.AhcWSComponents
import api.BarService
import api.FooService
import com.softwaremill.macwire._

class BarLoader extends LagomApplicationLoader {

  override def load(context: LagomApplicationContext): LagomApplication =
    new BarApplication(context) {
      override def serviceLocator = NoServiceLocator
    }

  override def loadDevMode(context: LagomApplicationContext): LagomApplication =
    new BarApplication(context) with LagomDevModeComponents
}

abstract class BarApplication(context: LagomApplicationContext) extends LagomApplication(context) with AhcWSComponents {

  override lazy val lagomServer = serverFor[BarService](wire[BarServiceImpl])

  lazy val fooService = serviceClient.implement[FooService]

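  // Append a timestamped marker to target/reload.log every time the application is (re)loaded.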
  Files.write(
    environment.getFile("target/reload.log").toPath,
    s"${new Date()} - reloaded\n".getBytes("utf-8"),
    StandardOpenOption.CREATE,
    StandardOpenOption.APPEND
  )
} 
Example 98
Source File: BazLoader.scala    From lagom   with Apache License 2.0
package impl

import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.util.Date

import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator
import com.lightbend.lagom.scaladsl.server._
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import play.api.libs.ws.ahc.AhcWSComponents
import api.BazService
import com.softwaremill.macwire._

class BazLoader extends LagomApplicationLoader {

  override def load(context: LagomApplicationContext): LagomApplication =
    new BazApplication(context) {
      override def serviceLocator = NoServiceLocator
    }

  override def loadDevMode(context: LagomApplicationContext): LagomApplication =
    new BazApplication(context) with LagomDevModeComponents
}

abstract class BazApplication(context: LagomApplicationContext) extends LagomApplication(context) with AhcWSComponents {

  override lazy val lagomServer = serverFor[BazService](wire[BazServiceImpl])

  Files.write(
    environment.getFile("target/reload.log").toPath,
    s"${new Date()} - reloaded\n".getBytes("utf-8"),
    StandardOpenOption.CREATE,
    StandardOpenOption.APPEND
  )
} 
Example 99
Source File: FooLoader.scala    From lagom   with Apache License 2.0
package impl

import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.util.Date

import com.lightbend.lagom.scaladsl.api.ServiceLocator.NoServiceLocator
import com.lightbend.lagom.scaladsl.server._
import com.lightbend.lagom.scaladsl.devmode.LagomDevModeComponents
import play.api.libs.ws.ahc.AhcWSComponents
import api.FooService
import com.softwaremill.macwire._

class FooLoader extends LagomApplicationLoader {

  override def load(context: LagomApplicationContext): LagomApplication =
    new FooApplication(context) {
      override def serviceLocator = NoServiceLocator
    }

  override def loadDevMode(context: LagomApplicationContext): LagomApplication =
    new FooApplication(context) with LagomDevModeComponents
}

abstract class FooApplication(context: LagomApplicationContext) extends LagomApplication(context) with AhcWSComponents {

  override lazy val lagomServer = serverFor[FooService](wire[FooServiceImpl])

  Files.write(
    environment.getFile("target/reload.log").toPath,
    s"${new Date()} - reloaded\n".getBytes("utf-8"),
    StandardOpenOption.CREATE,
    StandardOpenOption.APPEND
  )
} 
Example 100
Source File: MesosDriverDescription.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.deploy.mesos

import java.util.Date

import org.apache.spark.SparkConf
import org.apache.spark.deploy.Command
import org.apache.spark.scheduler.cluster.mesos.MesosClusterRetryState


private[spark] class MesosDriverDescription(
    val name: String,
    val jarUrl: String,
    val mem: Int,
    val cores: Double,
    val supervise: Boolean,
    val command: Command,
    schedulerProperties: Map[String, String],
    val submissionId: String,
    val submissionDate: Date,
    val retryState: Option[MesosClusterRetryState] = None)
  extends Serializable {

  val conf = new SparkConf(false)
  schedulerProperties.foreach {case (k, v) => conf.set(k, v)}

  def copy(
      name: String = name,
      jarUrl: String = jarUrl,
      mem: Int = mem,
      cores: Double = cores,
      supervise: Boolean = supervise,
      command: Command = command,
      schedulerProperties: SparkConf = conf,
      submissionId: String = submissionId,
      submissionDate: Date = submissionDate,
      retryState: Option[MesosClusterRetryState] = retryState): MesosDriverDescription = {

    new MesosDriverDescription(name, jarUrl, mem, cores, supervise, command, conf.getAll.toMap,
      submissionId, submissionDate, retryState)
  }

  override def toString: String = s"MesosDriverDescription (${command.mainClass})"
} 
Example 101
Source File: PMMLModelExport.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
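    // The header records the exporting application (Apache Spark MLlib), its version and the export timestamp.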
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US).format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
Example 102
Source File: DriverInfo.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.deploy.master

import java.util.Date

import org.apache.spark.deploy.DriverDescription
import org.apache.spark.util.Utils

private[deploy] class DriverInfo(
    val startTime: Long,
    val id: String,
    val desc: DriverDescription,
    val submitDate: Date)
  extends Serializable {

  @transient var state: DriverState.Value = DriverState.SUBMITTED
  // Stores the exception raised while launching the driver, if any; reset by init().
  @transient var exception: Option[Exception] = None
  @transient var worker: Option[WorkerInfo] = None

  init()

  private def readObject(in: java.io.ObjectInputStream): Unit = Utils.tryOrIOException {
    in.defaultReadObject()
    init()
  }

  private def init(): Unit = {
    state = DriverState.SUBMITTED
    worker = None
    exception = None
  }
} 
Example 103
Source File: AllStagesResourceSuite.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.status.api.v1

import java.util.Date

import scala.collection.mutable.LinkedHashMap

import org.apache.spark.SparkFunSuite
import org.apache.spark.scheduler.{StageInfo, TaskInfo, TaskLocality}
import org.apache.spark.ui.jobs.UIData.{StageUIData, TaskUIData}

class AllStagesResourceSuite extends SparkFunSuite {

  def getFirstTaskLaunchTime(taskLaunchTimes: Seq[Long]): Option[Date] = {
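    // Build StageUIData containing tasks with the given launch times and return the firstTaskLaunchedTime it yields.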
    val tasks = new LinkedHashMap[Long, TaskUIData]
    taskLaunchTimes.zipWithIndex.foreach { case (time, idx) =>
      tasks(idx.toLong) = TaskUIData(
        new TaskInfo(idx, idx, 1, time, "", "", TaskLocality.ANY, false), None)
    }

    val stageUiData = new StageUIData()
    stageUiData.taskData = tasks
    val status = StageStatus.ACTIVE
    val stageInfo = new StageInfo(
      1, 1, "stage 1", 10, Seq.empty, Seq.empty, "details abc")
    val stageData = AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData, false)

    stageData.firstTaskLaunchedTime
  }

  test("firstTaskLaunchedTime when there are no tasks") {
    val result = getFirstTaskLaunchTime(Seq())
    assert(result == None)
  }

  test("firstTaskLaunchedTime when there are tasks but none launched") {
    val result = getFirstTaskLaunchTime(Seq(-100L, -200L, -300L))
    assert(result == None)
  }

  test("firstTaskLaunchedTime when there are tasks and some launched") {
    val result = getFirstTaskLaunchTime(Seq(-100L, 1449255596000L, 1449255597000L))
    assert(result == Some(new Date(1449255596000L)))
  }

} 
Example 104
Source File: MasterWebUISuite.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.deploy.master.ui

import java.io.DataOutputStream
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import java.util.Date

import scala.collection.mutable.HashMap

import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.{KillDriverResponse, RequestKillDriver}
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv}


class MasterWebUISuite extends SparkFunSuite with BeforeAndAfterAll {

  val conf = new SparkConf
  val securityMgr = new SecurityManager(conf)
  val rpcEnv = mock(classOf[RpcEnv])
  val master = mock(classOf[Master])
  val masterEndpointRef = mock(classOf[RpcEndpointRef])
  when(master.securityMgr).thenReturn(securityMgr)
  when(master.conf).thenReturn(conf)
  when(master.rpcEnv).thenReturn(rpcEnv)
  when(master.self).thenReturn(masterEndpointRef)
  val masterWebUI = new MasterWebUI(master, 0)

  override def beforeAll() {
    super.beforeAll()
    masterWebUI.bind()
  }

  override def afterAll() {
    masterWebUI.stop()
    super.afterAll()
  }

  test("kill application") {
    val appDesc = createAppDesc()
    // use new start date so it isn't filtered by UI
    val activeApp = new ApplicationInfo(
      new Date().getTime, "app-0", appDesc, new Date(), null, Int.MaxValue)

    when(master.idToApp).thenReturn(HashMap[String, ApplicationInfo]((activeApp.id, activeApp)))

    val url = s"http://localhost:${masterWebUI.boundPort}/app/kill/"
    val body = convPostDataToString(Map(("id", activeApp.id), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify the master was called to remove the active app
    verify(master, times(1)).removeApplication(activeApp, ApplicationState.KILLED)
  }

  test("kill driver") {
    val activeDriverId = "driver-0"
    val url = s"http://localhost:${masterWebUI.boundPort}/driver/kill/"
    val body = convPostDataToString(Map(("id", activeDriverId), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify that master was asked to kill driver with the correct id
    verify(masterEndpointRef, times(1)).ask[KillDriverResponse](RequestKillDriver(activeDriverId))
  }

  private def convPostDataToString(data: Map[String, String]): String = {
    (for ((name, value) <- data) yield s"$name=$value").mkString("&")
  }

  
  private def sendHttpRequest(
      url: String,
      method: String,
      body: String = ""): HttpURLConnection = {
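    // Open the connection and, if a body was given, send it as a form-encoded payload.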
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod(method)
    if (body.nonEmpty) {
      conn.setDoOutput(true)
      conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")
      conn.setRequestProperty("Content-Length", Integer.toString(body.length))
      val out = new DataOutputStream(conn.getOutputStream)
      out.write(body.getBytes(StandardCharsets.UTF_8))
      out.close()
    }
    conn
  }
} 
Example 105
Source File: DeployTestUtils.scala    From sparkoscope   with Apache License 2.0
package org.apache.spark.deploy

import java.io.File
import java.util.Date

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}

private[deploy] object DeployTestUtils {
  def createAppDesc(): ApplicationDescription = {
    val cmd = new Command("mainClass", List("arg1", "arg2"), Map(), Seq(), Seq(), Seq())
    new ApplicationDescription("name", Some(4), 1234, cmd, "appUiUrl")
  }

  def createAppInfo() : ApplicationInfo = {
    val appDesc = createAppDesc()
    val appInfo = new ApplicationInfo(JsonConstants.appInfoStartTime,
      "id", appDesc, JsonConstants.submitDate, null, Int.MaxValue)
    appInfo.endTime = JsonConstants.currTimeInMillis
    appInfo
  }

  def createDriverCommand(): Command = new Command(
    "org.apache.spark.FakeClass", Seq("some arg --and-some options -g foo"),
    Map(("K1", "V1"), ("K2", "V2")), Seq("cp1", "cp2"), Seq("lp1", "lp2"), Seq("-Dfoo")
  )

  def createDriverDesc(): DriverDescription =
    new DriverDescription("hdfs://some-dir/some.jar", 100, 3, false, createDriverCommand())

  def createDriverInfo(): DriverInfo = new DriverInfo(3, "driver-3",
    createDriverDesc(), new Date())

  def createWorkerInfo(): WorkerInfo = {
    val workerInfo = new WorkerInfo("id", "host", 8080, 4, 1234, null, "http://publicAddress:80")
    workerInfo.lastHeartbeat = JsonConstants.currTimeInMillis
    workerInfo
  }

  def createExecutorRunner(execId: Int): ExecutorRunner = {
    new ExecutorRunner(
      "appId",
      execId,
      createAppDesc(),
      4,
      1234,
      null,
      "workerId",
      "host",
      123,
      "publicAddress",
      new File("sparkHome"),
      new File("workDir"),
      "spark://worker",
      new SparkConf,
      Seq("localDir"),
      ExecutorState.RUNNING)
  }

  def createDriverRunner(driverId: String): DriverRunner = {
    val conf = new SparkConf()
    new DriverRunner(
      conf,
      driverId,
      new File("workDir"),
      new File("sparkHome"),
      createDriverDesc(),
      null,
      "spark://worker",
      new SecurityManager(conf))
  }
} 
Example 106
Source File: JwtTokenGenerator.scala    From scala-play-realworld-example-app   with MIT License
package authentication.jwt.services

import java.time.Duration
import java.util.Date

import authentication.api.TokenGenerator
import authentication.models.{IdProfile, JwtToken}
import commons.repositories.DateTimeProvider
import io.jsonwebtoken.{Jwts, SignatureAlgorithm}

private[authentication] class JwtTokenGenerator(dateTimeProvider: DateTimeProvider, secretProvider: SecretProvider)
  extends TokenGenerator[IdProfile, JwtToken] {

  private val tokenDuration = Duration.ofHours(1)
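  // Issued tokens therefore expire one hour after dateTimeProvider.now.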

  override def generate(profile: IdProfile): JwtToken = {
    val signedToken = Jwts.builder
      .setExpiration(Date.from(expiredAt))
      .claim(JwtTokenGenerator.securityUserIdClaimName, profile.securityUserId.value.toString)
      .signWith(SignatureAlgorithm.HS256, secretProvider.get)
      .compact()

    JwtToken(signedToken)
  }

  private def expiredAt = {
    val now = dateTimeProvider.now
    now.plus(tokenDuration)
  }
}

private[authentication] object JwtTokenGenerator {
  val securityUserIdClaimName: String = "security_user_id"
} 
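A minimal verification sketch (not part of the original file), using the same jjwt 0.9-style API the generator relies on; the object name is hypothetical and the secret parameter is assumed to be the same value SecretProvider.get supplies to the generator:

package authentication.jwt.services

import io.jsonwebtoken.Jwts

private[authentication] object JwtTokenVerifier {
  // Parses the compact token, verifying the HS256 signature and the expiration claim
  // (jjwt throws if either check fails), then returns the security user id claim.
  def securityUserId(rawToken: String, secret: String): String =
    Jwts.parser()
      .setSigningKey(secret)
      .parseClaimsJws(rawToken)
      .getBody
      .get(JwtTokenGenerator.securityUserIdClaimName, classOf[String])
}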
Example 107
Source File: DriverInfo.scala    From SparkCore   with Apache License 2.0
package org.apache.spark.deploy.master

import java.util.Date

import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.deploy.DriverDescription
import org.apache.spark.util.Utils

private[spark] class DriverInfo(
    val startTime: Long,
    val id: String,
    val desc: DriverDescription,
    val submitDate: Date)
  extends Serializable {

  @transient var state: DriverState.Value = DriverState.SUBMITTED
  // Stores the exception raised while launching the driver, if any; reset by init().
  @transient var exception: Option[Exception] = None
  @transient var worker: Option[WorkerInfo] = None

  init()

  private def readObject(in: java.io.ObjectInputStream): Unit = Utils.tryOrIOException {
    in.defaultReadObject()
    init()
  }

  private def init(): Unit = {
    state = DriverState.SUBMITTED
    worker = None
    exception = None
  }
} 
Example 108
Source File: CustomScalarSpec.scala    From sangria   with Apache License 2.0
package sangria.schema

import java.text.SimpleDateFormat
import java.util.Date

import sangria.ast
import sangria.util.Pos
import sangria.util.SimpleGraphQlSupport._
import sangria.validation.ValueCoercionViolation

import scala.util.{Failure, Success, Try}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec

class CustomScalarSpec extends AnyWordSpec with Matchers {
  "Schema" should {
    "allow to define custom scalar types" in {
      val dateFormat = new SimpleDateFormat("yyyy-MM-dd")

      case object DateCoercionViolation extends ValueCoercionViolation("Date value expected")

      def parseDate(s: String) = Try(dateFormat.parse(s)) match {
        case Success(d) => Right(d)
        case Failure(error) => Left(DateCoercionViolation)
      }

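      // The custom scalar serializes Date values as yyyy-MM-dd strings and reuses parseDate for both user input and AST literals.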
      val DateType = ScalarType[Date]("Date",
        description = Some("An example of date scalar type"),
        coerceOutput = (d, _) => dateFormat.format(d),
        coerceUserInput = {
          case s: String => parseDate(s)
          case _ => Left(DateCoercionViolation)
        },
        coerceInput = {
          case ast.StringValue(s, _, _, _, _) => parseDate(s)
          case _ => Left(DateCoercionViolation)
        })

      val DateArg = Argument("dateInput", DateType)

      val QueryType = ObjectType("Query", fields[Unit, Unit](
        Field("foo", DateType,
          arguments = DateArg :: Nil,
          resolve = ctx => {
            val date: Date = ctx.arg(DateArg)
            new Date(date.getTime + 1000 * 60 * 60 * 24 * 5)
          })
      ))

      val schema = Schema(QueryType)

      check(schema, (),
        """
          {
            foo(dateInput: "2015-05-11")
          }
        """,
        Map("data" -> Map("foo" -> "2015-05-16"))
      )

      checkContainsErrors(schema, (),
        """
          {
            foo(dateInput: "2015-05-test")
          }
        """,
        null,
        List("""Expected type 'Date!', found '"2015-05-test"'. Date value expected""" -> List(Pos(3, 28)))
      )
    }
  }
} 
Example 109
Source File: CarbonShowStreamsCommand.scala    From carbondata   with Apache License 2.0
package org.apache.spark.sql.execution.command.stream

import java.util.Date
import java.util.concurrent.TimeUnit

import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.execution.command.MetadataCommand
import org.apache.spark.sql.types.StringType

import org.apache.carbondata.stream.StreamJobManager


case class CarbonShowStreamsCommand(
    tableOp: Option[TableIdentifier]
) extends MetadataCommand {
  override def output: Seq[Attribute] = {
    Seq(AttributeReference("Stream Name", StringType, nullable = false)(),
      AttributeReference("JobId", StringType, nullable = false)(),
      AttributeReference("Status", StringType, nullable = false)(),
      AttributeReference("Source", StringType, nullable = false)(),
      AttributeReference("Sink", StringType, nullable = false)(),
      AttributeReference("Start Time", StringType, nullable = false)(),
      AttributeReference("Time Elapse", StringType, nullable = false)())
  }

  override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
    val jobs = tableOp match {
      case None => StreamJobManager.getAllJobs.toSeq
      case Some(table) =>
        val carbonTable = CarbonEnv.getCarbonTable(table.database, table.table)(sparkSession)
        setAuditTable(carbonTable)
        StreamJobManager.getAllJobs.filter { job =>
          job.sinkTable.equalsIgnoreCase(carbonTable.getTableName) &&
          job.sinkDb.equalsIgnoreCase(carbonTable.getDatabaseName)
        }.toSeq
    }

    jobs.map { job =>
      val elapsedTime = System.currentTimeMillis() - job.startTime
      Row(
        job.streamName,
        job.streamingQuery.id.toString,
        if (job.streamingQuery.isActive) "RUNNING" else "FAILED",
        s"${ job.sourceDb }.${ job.sourceTable }",
        s"${ job.sinkDb }.${ job.sinkTable }",
        new Date(job.startTime).toString,
        String.format(
          "%s days, %s hours, %s min, %s sec",
          TimeUnit.MILLISECONDS.toDays(elapsedTime).toString,
          TimeUnit.MILLISECONDS.toHours(elapsedTime).toString,
          TimeUnit.MILLISECONDS.toMinutes(elapsedTime).toString,
          TimeUnit.MILLISECONDS.toSeconds(elapsedTime).toString)
      )
    }
  }

  override protected def opName: String = "SHOW STREAMS"
} 
Example 110
Source File: ReservationAggregateRoot.scala    From ddd-leaven-akka-v2   with MIT License
package ecommerce.sales

import java.util.Date

import pl.newicom.dddd.actor.{Config, ConfigClass}
import pl.newicom.dddd.aggregate._
import pl.newicom.dddd.office.LocalOfficeId
import pl.newicom.dddd.office.LocalOfficeId.fromRemoteId

object ReservationAggregateRoot extends AggregateRootSupport {

  sealed trait Reservation extends Behavior[Event, Reservation, Config] {

    def canceledOrClosed: Actions =
      handleCommand {
        case CloseReservation(reservationId) =>
          ReservationClosed(reservationId)

        case CancelReservation(reservationId) =>
          ReservationCanceled(reservationId)
      }
      .handleEvent {
        case ReservationCanceled(_) => Canceled
        case ReservationClosed(_) => Closed
      }

  }

  implicit case object Uninitialized extends Reservation with Uninitialized[Reservation] {

    def actions: Actions =
      handleCommand {
        case CreateReservation(reservationId, clientId) =>
          ReservationCreated(reservationId, clientId)
      }
      .handleEvent {
        case ReservationCreated(_, customerId) =>
          Opened(customerId, items = List.empty, createDate = new Date)
      }

  }

  case class Opened(customerId: EntityId, items: List[ReservationItem], createDate: Date) extends Reservation {

    def actions: Actions =
      handleCommand {
        case ReserveProduct(reservationId, product, quantity) =>
          ProductReserved(reservationId, product, quantity)

        case ConfirmReservation(reservationId) =>
          ReservationConfirmed(reservationId, customerId, totalAmount)
      }
      .handleEvent {
        case ProductReserved(_, product, quantity) =>
          val newItems = items.find(item => item.productId == product.id) match {
            case Some(orderLine) =>
              val index = items.indexOf(orderLine)
              items.updated(index, orderLine.increaseQuantity(quantity))
            case None =>
              ReservationItem(product, quantity) :: items
          }
          copy(items = newItems)

        case _: ReservationConfirmed =>
          Confirmed
      }
      .orElse(canceledOrClosed)

    def totalAmount: Money =
      items.foldLeft(Money()) {
        (m, item) => item.product.price + m
      }

  }

  case object Confirmed extends Reservation {
    def actions: Actions = canceledOrClosed
  }

  case object Canceled extends Reservation {
    def actions: Actions = canceledOrClosed
  }

  case object Closed extends Reservation {
    def actions: Actions = noActions
  }

  implicit val officeId: LocalOfficeId[ReservationAggregateRoot] = fromRemoteId[ReservationAggregateRoot](ReservationOfficeId)

}

import ecommerce.sales.ReservationAggregateRoot._

class ReservationAggregateRoot(val config: Config) extends AggregateRoot[Event, Reservation, ReservationAggregateRoot] with ConfigClass[Config] 
Example 111
Source File: spark_algo.scala    From mllib_subpackage   with Apache License 2.0
import org.apache.commons.cli.{Options, PosixParser}
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
//import org.apache.hadoop.fs
import java.util.Date
import java.util.Calendar



import org.apache.hadoop.fs.FileSystem
//import sun.management.FileSystem


object spark_algo {
  def main(args: Array[String]) {

    // Input Params
    val parser = new PosixParser( )
    val options = new Options( )
    options.addOption("a", "algo", true, "algo type; 10. sgd 11. lbfgs")
    val cl = parser.parse( options, args, true )
    val algo = cl.getOptionValue("algo")

    val conf = new SparkConf()
    val sc = new SparkContext(conf)

    sc.getConf.getAll.foreach(println)

    val configuration = sc.hadoopConfiguration
    configuration.setBoolean("mapreduce.output.fileoutputformat.compress", false)
    val fs = FileSystem.get(configuration)
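    // Instantiate the algorithm implementation selected by the --algo option; each one receives the SparkContext, FileSystem and remaining args.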
    val modeltmp = if(algo=="10" || algo=="11" || algo=="12" || algo=="13") {
      new mllib_lr(sc, fs, args)
    } else if(algo=="21") {
      new ftrl(sc, fs, args)
    } else if(algo=="22") {
      new ftrl_batch(sc, fs, args)
    } else if(algo=="31") {
      new relative(sc, fs, args)
    } else if(algo=="40") {
      new mllib_gbdt(sc, fs, args)
    } else if(algo=="41") {
      new lambda_mart(sc, fs, args)
    } else if(algo=="91") {
      new feature_analyse(sc, fs, args)
    } else if(algo=="docs_words_analyse") {
      new docs_words_analyse(sc, fs, args)
    }

    val model = modeltmp.asInstanceOf[malgo]
    model.deal()
  }



} 
Example 112
Source File: Timer.scala    From learn-scala-java-devs   with Apache License 2.0
package s4j.scala.chapter17

import java.util.Date

object TimerExample {
  
  def exampleTimer() = {
    val timer = new NaiveTimer()
    timer.start()
    someLongRunningTask()
    val time = timer.stop()
    println("process took " + time + "ms")
  }

  def anotherExampleTimer() = {
    val timer = Timer()
    timer.time {
      someLongRunningTask()
      null
    }
    println("process took " + timer + "ms")
  }

  def yetAnotherExampleTimer() = {
    val timer = Timer()
    timer.time(() => {
      someLongRunningTask()
    })
    println("process took " + timer + "ms")
  }

  def someLongRunningTask() = Thread.sleep(1000)
}

class NaiveTimer {
  private var startDate: Date = null

  def start() = startDate = new Date

  def stop(): Long = new Date().getTime - startDate.getTime
}

object Timer {
  def apply() = new Timer()
}

class Timer {

  private val start = new Date

  def time(function: () => Unit) {
    try {
      function.apply()
    } finally {
      new Date().getTime - start.getTime
    }
  }
  
} 
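The Timer above evaluates its elapsed-time expression inside a finally block, where the value of the block is discarded, so time(...) returns Unit and the caller never sees the measurement. A minimal sketch of a variant whose time(...) returns the elapsed milliseconds (ReturningTimer is a hypothetical name, not part of the book's code):

import java.util.Date

class ReturningTimer {
  // Run the block and return the elapsed wall-clock time in milliseconds, measured with java.util.Date.
  def time(function: () => Unit): Long = {
    val start = new Date
    function.apply()
    new Date().getTime - start.getTime
  }
}

// usage: val millis = new ReturningTimer().time(() => someLongRunningTask())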
Example 113
Source File: TestNestingFields.scala    From kafka-connect-transformers   with Apache License 2.0 5 votes vote down vote up
package com.datamountaineer.streamreactor.connect.transforms

import java.util.Date

import com.datamountaineer.streamreactor.connect.transforms.NestingFields.Value
import org.apache.kafka.connect.data._
import org.apache.kafka.connect.source.SourceRecord
import org.apache.kafka.connect.transforms.util.Requirements.requireStruct
import org.scalatest.{Matchers, WordSpec}

import scala.collection.JavaConversions._

class TestNestingFields extends WordSpec with Matchers {
  val OPTIONAL_TIMESTAMP_SCHEMA = Timestamp.builder().optional().build()
  val OPTIONAL_DECIMAL_SCHEMA = Decimal.builder(18).optional().build()

  private val NESTED_NAME_CONFIG = "nested.name"
  private val FIELDS_CONFIG = "fields"

  "should append another field with two nested fields when have schema" in {
    val transform = new Value[SourceRecord];
    transform.configure(Map(
      NESTED_NAME_CONFIG -> "id",
      FIELDS_CONFIG -> "dateValue1, decimalValue1")
    )

    val transformedRecord = transform.apply(mockRecord(true));

    val value = requireStruct(transformedRecord.value, null)
    val schema = transformedRecord.valueSchema

    val nestedSchema = schema.field("id").schema()
    val nestedValue =  requireStruct(value.get("id"), null)

    nestedSchema.field("dateValue1").schema().`type`() shouldBe schema.field("dateValue1").schema().`type`()
    nestedValue.get("dateValue1") shouldBe value.get("dateValue1")

    nestedSchema.field("decimalValue1").schema().`type`() shouldBe schema.field("decimalValue1").schema().`type`()
    nestedValue.get("decimalValue1") shouldBe value.get("decimalValue1")
  }

  "should append another field with one nested fields when have schema" in {
    val transform = new Value[SourceRecord];
    transform.configure(Map(
      NESTED_NAME_CONFIG -> "id",
      FIELDS_CONFIG -> "decimalValue1")
    )

    val transformedRecord = transform.apply(mockRecord(true));

    val value = requireStruct(transformedRecord.value, null)
    val schema = transformedRecord.valueSchema

    val nestedSchema = schema.field("id").schema()
    val nestedValue =  requireStruct(value.get("id"), null)

    nestedSchema.field("decimalValue1").schema().`type`() shouldBe schema.field("decimalValue1").schema().`type`()
    nestedValue.get("decimalValue1") shouldBe value.get("decimalValue1")
  }

  "should append another field with one nested fields when don't have schema" in {
    val transform = new Value[SourceRecord];
    transform.configure(Map(
      NESTED_NAME_CONFIG -> "id",
      FIELDS_CONFIG -> "decimalValue1")
    )

    val transformedRecord = transform.apply(mockRecord(true));

    val value = requireStruct(transformedRecord.value, null)

    val nestedValue =  requireStruct(value.get("id"), null)
    nestedValue.get("decimalValue1") shouldBe value.get("decimalValue1")
  }

  private def mockRecord(withSchema: Boolean) = {
    val simpleStructSchema = SchemaBuilder.struct.name("name").version(1).doc("doc")
      .field("magic", Schema.OPTIONAL_INT64_SCHEMA)
      .field("dateValue1", OPTIONAL_TIMESTAMP_SCHEMA)
      .field("decimalValue1", OPTIONAL_DECIMAL_SCHEMA)
      .build

    val simpleStruct = new Struct(simpleStructSchema)
      .put("magic", 42L)
      .put("dateValue1", new Date())
      .put("decimalValue1", BigDecimal(10.6).bigDecimal.setScale(18))

    new SourceRecord(null, null, "test", 0, if (withSchema) simpleStructSchema else null, simpleStruct)
  }

} 
Example 114
Source File: Animation.scala    From threejs-facade   with Mozilla Public License 2.0 5 votes vote down vote up
package org.denigma.threejs.extensions.animations

import java.util.Date

import org.scalajs.dom
import Animation.{Started, AniState}
import scala.concurrent.duration.Duration
import scala.scalajs.js


class Scheduler
{
  def current: Double = js.Date.now
  var animations = List.empty[Animation]

  def add(ani: Animation): Unit = {
    this.animations = ani :: animations
    ani.state = Animation.Running(current)
  }

  def tick(): Unit =   {
    val now = current
    animations.foreach{ani=>
      ani.state match {
        case Animation.Running(start)=> ani(now)
        case _ =>
        // dom.console.info(other.toString)
        // do nothing
      }
    }
  }

  protected def onEnterFrameFunction(double: Double): Unit = {
    this.tick()
    start()
  }

  def start(): Scheduler = {
    dom.window.requestAnimationFrame(onEnterFrameFunction _ )
    this
  }


}

object Easings {

  val linear: Double=>Double = i=>i

}


object Animation{
  trait AniState
  trait Started extends AniState{
    def start: Double

  }
  case object Stopped extends AniState
  case class Finished(start: Double) extends Started
  case class Paused(start: Double) extends Started
  case class Running(start: Double) extends Started
  case class Backward(start: Double, finished: Double) extends Started

}


class Animation(val length: Duration, easing: Double=>Double = Easings.linear)(fun: (Double=>Unit)){

  lazy val lengthMillis: Long = length.toMillis

  var state: AniState = Animation.Stopped

  def apply(current: Double): Unit =    state match {
    case st: Started=>
      val finish: Double = st.start + this.lengthMillis
      easing(1.0-(finish-current)/length.toMillis) match{
        case p: Double if p>=1.0=>
          fun(1.0)
          this.state = Animation.Finished(current)
        case p: Double if p < 0.0=>
          dom.console.error(s"animation percent is $p that is below zero!\n " +
            s"Current time is $current, start is ${st.start} and length is $lengthMillis")
          this.state = Animation.Finished(current)

        case p: Double=>
          fun(p)
        // dom.console.info( s"Current time is $current, start is ${st.start} and length is $lengthMillis and percent is $p")

      }

    case _=> dom.console.error("trying to run an operation that has not started")
  }

  def go(implicit scheduler: Scheduler): Unit = {
    scheduler.add(this)
  }

} 
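A brief usage sketch for the Scala.js animation API above (the two-second duration and the println callback are illustrative only):

import scala.concurrent.duration._

// Assumes a Scala.js/browser environment, since Scheduler drives dom.window.requestAnimationFrame.
implicit val scheduler: Scheduler = new Scheduler().start()

// Animate for two seconds with the default linear easing; the callback receives progress in [0.0, 1.0].
new Animation(2.seconds)(percent => println(s"progress: $percent")).go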
Example 115
Source File: DateUtils.scala    From common4s   with Apache License 2.0 5 votes vote down vote up
package commons.mapper.utils

import java.text.{ ParseException, ParsePosition, SimpleDateFormat }
import java.util.Date


// Note: the enclosing object is assumed from the source file name (DateUtils.scala).
object DateUtils {

	def parseDateWithLeniency(str : String, parsePatterns : Array[String], lenient : Boolean) : Date = {
		if (str == null || parsePatterns == null) {
			throw new IllegalArgumentException("Date and Patterns must not be null");
		}

		val parser = new SimpleDateFormat();
		parser.setLenient(lenient);
		val pos = new ParsePosition(0);

		for (parsePattern <- parsePatterns) {

			var pattern = parsePattern;

			// LANG-530 - need to make sure 'ZZ' output doesn't get passed to SimpleDateFormat
			if (parsePattern.endsWith("ZZ")) {
				pattern = pattern.substring(0, pattern.length() - 1);
			}

			parser.applyPattern(pattern);
			pos.setIndex(0);

			var str2 = str;
			// LANG-530 - need to make sure 'ZZ' output doesn't hit SimpleDateFormat as it will ParseException
			if (parsePattern.endsWith("ZZ")) {
				str2 = str.replaceAll("([-+][0-9][0-9]):([0-9][0-9])$", "$1$2");
			}

			val date = parser.parse(str2, pos);
			if (date != null && pos.getIndex() == str2.length()) {
				return date;
			}
		}

		throw new ParseException("Unable to parse the date: " + str, -1);
	}
} 
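A minimal usage sketch for the parser above (the patterns and the input value are illustrative; DateUtils is the object name assumed in the snippet):

import java.util.Date

val patterns = Array("yyyy-MM-dd'T'HH:mm:ss", "yyyy-MM-dd")
val parsed: Date = DateUtils.parseDateWithLeniency("2020-06-01T12:30:00", patterns, lenient = false)
println(parsed) // prints the parsed java.util.Date in the JVM's default time zone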
Example 116
Source File: DateUtil.scala    From real-time-stream-processing-engine   with Apache License 2.0 5 votes vote down vote up
package com.knoldus.streaming.util

import java.text.SimpleDateFormat
import java.util.Date


object DateUtil {


  private val dateFormats = List(
    "yyyyMMdd'T'HHmmss.SSSZ",
    "EEE, dd MMM yyyy HH:mm:ss Z",
    "yyyy-MM-dd HH:mm:ss",
    "EEE MMM dd HH:mm:ss Z yyyy",
    "MMM dd, yyyy, HH:mm a",
    "MMM dd, yyyy HH:mm a",
    "yyyy-MM-dd'T'HH:mm:ss",
    "dd MMM yyyy HH:mm:ss:S Z",
    "E MMM dd HH:mm:ss z yyyy",
    "dd MMM yyyy HH:mm:ss:SSS",
    "dd MMM yyyy H:mm:ss:SSS",
    "MM-dd-yyyy HH:mm:ss:SSS",
    "MM/dd/yyyy HH:mm:ss:SSS",
    "dd/MM/yyyy HH:mm:ss:SSS",
    "dd-MM-yyyy HH:mm:ss:SSS",
    "MMM/dd/yyyy HH:mm:ss:SSS",
    "MMM-dd-yyyy HH:mm:ss:SSS",
    "dd-MMM-yyyy HH:mm:ss:SSS",
    "MM-dd-yyyy H:mm:ss:SSS",
    "MM/dd/yyyy H:mm:ss:SSS",
    "dd/MM/yyyy H:mm:ss:SSS",
    "dd-MM-yyyy H:mm:ss:SSS",
    "MMM/dd/yyyy H:mm:ss:SSS",
    "MMM-dd-yyyy H:mm:ss:SSS",
    "dd-MMM-yyyy H:mm:ss:SSS",
    "MM-dd-yyyy HH:mm:ss",
    "MM/dd/yyyy HH:mm:ss",
    "dd/MM/yyyy HH:mm:ss",
    "dd-MM-yyyy HH:mm:ss",
    "MMM/dd/yyyy HH:mm:ss",
    "MMM-dd-yyyy HH:mm:ss",
    "dd-MMM-yyyy HH:mm:ss",
    "MM-dd-yyyy H:mm:ss",
    "MM/dd/yyyy H:mm:ss",
    "dd/MM/yyyy H:mm:ss",
    "dd-MM-yyyy H:mm:ss",
    "MMM/dd/yyyy H:mm:ss",
    "MMM-dd-yyyy H:mm:ss",
    "dd-MMM-yyyy H:mm:ss",
    "yyyy-MM-dd",
    "MM-dd-yyyy",
    "MM/dd/yyyy",
    "dd/MM/yyyy",
    "dd-MM-yyyy",
    "MMM/dd/yyyy",
    "MMM-dd-yyyy",
    "dd-MMM-yyyy")


  private val esDateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ")

  def getESDateFormat(dateString: String): String = {
    def getDate(dateFormats: Seq[String], dateString: String): String =
      try {
        val dateFormat = new SimpleDateFormat(dateFormats.head)
        val date = dateFormat.parse(dateString)
        esDateFormat.format(date)
      } catch {
        case _ if (dateFormats.size > 1) => getDate(dateFormats.tail, dateString)
        case _: Exception => esDateFormat.format(new Date())
      }
    getDate(dateFormats, dateString)
  }

} 
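A short usage sketch of the helper above (inputs are illustrative, and an English-locale JVM is assumed for the month and day names): any string matching one of the listed patterns is converted to the Elasticsearch timestamp format, and an unparseable string falls back to the current time.

// "EEE, dd MMM yyyy HH:mm:ss Z" is one of the patterns in the list above.
println(DateUtil.getESDateFormat("Wed, 04 Jul 2001 12:08:56 -0700"))

// No pattern matches here, so the current time is formatted instead.
println(DateUtil.getESDateFormat("definitely not a date"))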
Example 117
Source File: ApiService.scala    From scalajs-spa-tutorial   with Apache License 2.0 5 votes vote down vote up
package services

import java.util.{UUID, Date}

import spatutorial.shared._

class ApiService extends Api {
  var todos = Seq(
    TodoItem("41424344-4546-4748-494a-4b4c4d4e4f50", 0x61626364, "Wear shirt that says “Life”. Hand out lemons on street corner.", TodoLow, completed = false),
    TodoItem("2", 0x61626364, "Make vanilla pudding. Put in mayo jar. Eat in public.", TodoNormal, completed = false),
    TodoItem("3", 0x61626364, "Walk away slowly from an explosion without looking back.", TodoHigh, completed = false),
    TodoItem("4", 0x61626364, "Sneeze in front of the pope. Get blessed.", TodoNormal, completed = true)
  )

  override def welcomeMsg(name: String): String =
    s"Welcome to SPA, $name! Time is now ${new Date}"

  override def getAllTodos(): Seq[TodoItem] = {
    // provide some fake Todos
    Thread.sleep(300)
    println(s"Sending ${todos.size} Todo items")
    todos
  }

  // update a Todo
  override def updateTodo(item: TodoItem): Seq[TodoItem] = {
    // TODO, update database etc :)
    if(todos.exists(_.id == item.id)) {
      todos = todos.collect {
        case i if i.id == item.id => item
        case i => i
      }
      println(s"Todo item was updated: $item")
    } else {
      // add a new item
      val newItem = item.copy(id = UUID.randomUUID().toString)
      todos :+= newItem
      println(s"Todo item was added: $newItem")
    }
    Thread.sleep(300)
    todos
  }

  // delete a Todo
  override def deleteTodo(itemId: String): Seq[TodoItem] = {
    println(s"Deleting item with id = $itemId")
    Thread.sleep(300)
    todos = todos.filterNot(_.id == itemId)
    todos
  }
} 
Example 118
Source File: RangerSparkAccessRequest.scala    From spark-ranger   with Apache License 2.0 5 votes vote down vote up
package org.apache.ranger.authorization.spark.authorizer

import java.util.Date

import org.apache.ranger.authorization.spark.authorizer.SparkAccessType.SparkAccessType
import org.apache.ranger.plugin.policyengine.{RangerAccessRequestImpl, RangerPolicyEngine}
import org.apache.ranger.plugin.util.RangerAccessRequestUtil

import scala.collection.JavaConverters._

class RangerSparkAccessRequest private extends RangerAccessRequestImpl {

  private var accessType = SparkAccessType.NONE

  def this(
      resource: RangerSparkResource,
      user: String,
      groups: Set[String],
      opType: String,
      accessType: SparkAccessType,
      clusterName: String) {
    this()
    this.setResource(resource)
    this.setUser(user)
    this.setUserGroups(groups.asJava)
    this.setAccessTime(new Date)
    this.setAction(opType)
    this.setSparkAccessType(accessType)
    this.setUser(user)
    this.setClusterName(clusterName)
  }

  def this(resource: RangerSparkResource, user: String, groups: Set[String],
      clusterName: String) = {
    this(resource, user, groups, "METADATA OPERATION", SparkAccessType.USE, clusterName)
  }

  def getSparkAccessType: SparkAccessType = accessType

  def setSparkAccessType(accessType: SparkAccessType): Unit = {
    this.accessType = accessType
    accessType match {
      case SparkAccessType.USE => this.setAccessType(RangerPolicyEngine.ANY_ACCESS)
      case SparkAccessType.ADMIN => this.setAccessType(RangerPolicyEngine.ADMIN_ACCESS)
      case _ => this.setAccessType(accessType.toString.toLowerCase)
    }
  }

  def copy(): RangerSparkAccessRequest = {
    val ret = new RangerSparkAccessRequest()
    ret.setResource(getResource)
    ret.setAccessType(getAccessType)
    ret.setUser(getUser)
    ret.setUserGroups(getUserGroups)
    ret.setAccessTime(getAccessTime)
    ret.setAction(getAction)
    ret.setClientIPAddress(getClientIPAddress)
    ret.setRemoteIPAddress(getRemoteIPAddress)
    ret.setForwardedAddresses(getForwardedAddresses)
    ret.setRequestData(getRequestData)
    ret.setClientType(getClientType)
    ret.setSessionId(getSessionId)
    ret.setContext(RangerAccessRequestUtil.copyContext(getContext))
    ret.accessType = accessType
    ret.setClusterName(getClusterName)
    ret
  }
} 
Example 119
Source File: DataFrameExtensions.scala    From spark-powerbi-connector   with Apache License 2.0 5 votes vote down vote up
package com.microsoft.azure.powerbi.extensions

import java.sql.Timestamp
import java.util.Date

import scala.collection.mutable.ListBuffer

import com.microsoft.azure.powerbi.authentication.PowerBIAuthentication
import com.microsoft.azure.powerbi.common.PowerBIUtils
import com.microsoft.azure.powerbi.models.{table, PowerBIDatasetDetails}

import org.apache.spark.sql.DataFrame

object DataFrameExtensions {

  implicit def PowerBIDataFrame(dataFrame: DataFrame): PowerBIDataFrame =
    new PowerBIDataFrame(dataFrame: DataFrame)

  class PowerBIDataFrame(dataFrame: DataFrame) extends Serializable{

    def toPowerBI(powerbiDatasetDetails: PowerBIDatasetDetails, powerbiTable: table,
                  powerBIAuthentication: PowerBIAuthentication): Unit = {

      var authenticationToken: String = powerBIAuthentication.getAccessToken

      dataFrame.foreachPartition { partition =>

        // PowerBI row limit in single request is 10,000. We limit it to 1000.

        partition.grouped(1000).foreach {
          group => {
            val powerbiRowListBuffer: ListBuffer[Map[String, Any]] = ListBuffer[Map[String, Any]]()
            group.foreach {
              record => {
                var powerbiRow: Map[String, Any] = Map[String, Any]()

                for (i <- 0 until record.length) {
                  powerbiRow += (powerbiTable.columns(i).name -> record(i))
                }

                powerbiRowListBuffer += powerbiRow
              }

              var attemptCount = 0
              var pushSuccessful = false

              while (!pushSuccessful && attemptCount < this.retryCount) {
                try {

                    PowerBIUtils.addMultipleRows(powerbiDatasetDetails, powerbiTable,
                      powerbiRowListBuffer, authenticationToken)
                    pushSuccessful = true
                }
                catch {
                  case e: Exception =>
                    println(f"Exception inserting multiple rows: ${e.getMessage}")
                    Thread.sleep(secondsBetweenRetry * 1000)
                    attemptCount += 1

                    authenticationToken = powerBIAuthentication.refreshAccessToken
                }
              }
            }
          }
        }
      }
    }

    def countTimelineToPowerBI(powerbiDatasetDetails: PowerBIDatasetDetails, powerbiTable: table,
                               powerBIAuthentication: PowerBIAuthentication): Unit = {

      var authenticationToken: String = powerBIAuthentication.getAccessToken
      val currentTimestamp = new Timestamp(new Date().getTime)

      val powerbiRow = Map(powerbiTable.columns.head.name -> currentTimestamp,
        powerbiTable.columns(1).name -> dataFrame.count())

      var attemptCount = 0
      var pushSuccessful = false

      while (!pushSuccessful && attemptCount < this.retryCount) {
        try {
          PowerBIUtils.addRow(powerbiDatasetDetails, powerbiTable, powerbiRow, authenticationToken)
          pushSuccessful = true
        }
        catch {
          case e: Exception => println("Exception inserting row: " + e.getMessage)
            Thread.sleep(secondsBetweenRetry * 1000)
            attemptCount += 1

            authenticationToken = powerBIAuthentication.refreshAccessToken
        }
      }
    }

    private val retryCount: Int = 3
    private val secondsBetweenRetry: Int = 1
  }
} 
Example 120
Source File: KafkaProducer.scala    From spark-ref-architecture   with Apache License 2.0 5 votes vote down vote up
package com.stc.spark.streaming.kafka

import java.util.{Date, Properties}

import kafka.producer.{KeyedMessage, Producer, ProducerConfig}

import scala.util.Random

object KafkaProducer extends App {
  val events = args(0).toInt
  val topic = args(1)
  val brokers = args(2)
  val rnd = new Random()

  val producer = new Producer[String, String](KafkaConfig.config)
  val t = System.currentTimeMillis()
  for (nEvents <- Range(0, events)) {
    val runtime = new Date().getTime();
    val ip = "192.168.2." + rnd.nextInt(255);
    val msg = runtime + "," + nEvents + ",www.example.com," + ip;
    val data = new KeyedMessage[String, String](topic, ip, msg);
    producer.send(data);
  }

  System.out.println("sent per second: " + events * 1000 / (System.currentTimeMillis() - t));
  producer.close();
} 
Example 121
Source File: AnnotatorParam.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators.param

import java.util.{Date, TimeZone}

import org.apache.spark.ml.param.Param
import org.apache.spark.ml.util.Identifiable
import org.json4s._
import org.json4s.jackson.JsonMethods._
import org.json4s.jackson.Serialization.write


  object SerializableFormat extends Formats with Serializable {
    class SerializableDateFormat extends DateFormat {
      def timezone: TimeZone = throw new Exception("SerializableFormat does not implement dateformat")
      override def format(d: Date): String = throw new Exception("SerializableFormat does not implement dateformat")
      override def parse(s: String): Option[Date] = throw new Exception("SerializableFormat does not implement dateformat")
    }
    override def dateFormat: DateFormat = new SerializableDateFormat
  }

  implicit val formats = SerializableFormat

  override def jsonEncode(value: A): String = write(value.serialize)

  override def jsonDecode(json: String): A = parse(json).extract[B].deserialize
} 
Example 122
Source File: TrainingHelper.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.util

import java.io.File
import java.nio.file.{Files, Paths, StandardCopyOption}
import java.sql.Timestamp
import java.util.Date

import com.johnsnowlabs.nlp.pretrained.ResourceType.ResourceType
import com.johnsnowlabs.nlp.pretrained.{ResourceMetadata, ResourceType}
import org.apache.commons.io.FileUtils
import org.apache.spark.ml.util.MLWriter


object TrainingHelper {

  def saveModel(name: String,
                language: Option[String],
                libVersion: Option[Version],
                sparkVersion: Option[Version],
                modelWriter: MLWriter,
                folder: String,
                category: Option[ResourceType] = Some(ResourceType.NOT_DEFINED)
               ): Unit = {

    // 1. Get current timestamp
    val timestamp = new Timestamp(new Date().getTime)


    // 2. Save model to file
    val file = Paths.get(folder, timestamp.toString).toString.replaceAllLiterally("\\", "/")
    modelWriter.save(file)

    // 3. Zip file
    val tempzipFile = Paths.get(folder, timestamp + ".zip")
    ZipArchiveUtil.zip(file, tempzipFile.toString)

    // 4. Set checksum
    val checksum = FileHelper.generateChecksum(tempzipFile.toString)

    // 5. Create resource metadata
    val meta = new ResourceMetadata(name, language, libVersion, sparkVersion, true, timestamp, true, category = category, checksum)

    val zipfile = Paths.get(meta.fileName)

    // 6. Move the zip
    Files.move(tempzipFile, zipfile, StandardCopyOption.REPLACE_EXISTING)

    // 7. Remove original file
    try {
      FileUtils.deleteDirectory(new File(file))
    } catch {
      case _: java.io.IOException => //file lock may prevent deletion, ignore and continue
    }

    // 8. Add info about the resource to metadata.json
    val metadataFile = Paths.get(folder, "metadata.json").toString
    ResourceMetadata.addMetadataToFile(metadataFile, meta)
  }
} 
Example 123
Source File: PragmaticSentimentTestSpec.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators.sda.pragmatic

import com.johnsnowlabs.nlp.annotators.common.Sentence
import com.johnsnowlabs.nlp._
import com.johnsnowlabs.nlp.annotators.Tokenizer
import com.johnsnowlabs.nlp.util.io.{ExternalResource, ReadAs}
import org.apache.spark.storage.StorageLevel
import org.scalatest._
import org.scalatest.tagobjects.Slow

class PragmaticSentimentBigTestSpec extends FlatSpec {

  "Parquet based data" should "be sentiment detected properly" taggedAs Slow in {
    import java.util.Date

    val data = ContentProvider.parquetData.limit(1000)

    val documentAssembler = new DocumentAssembler()
      .setInputCol("text")

    val assembled = documentAssembler.transform(data)

    val sentimentDetector = new SentimentDetector()

    val readyData = AnnotatorBuilder.withFullPOSTagger(AnnotatorBuilder.withFullLemmatizer(assembled))
    
    val result = sentimentDetector
      .setInputCols(Array("token", "sentence"))
      .setOutputCol("my_sda_scores")
      .setDictionary(ExternalResource("src/test/resources/sentiment-corpus/default-sentiment-dict.txt", ReadAs.TEXT, Map("delimiter" -> ",")))
      .setEnableScore(false)
      .fit(readyData)
      .transform(readyData)

    import Annotation.extractors._

    val date1 = new Date().getTime
    result.show(2)
    info(s"20 show sample of disk based sentiment analysis took: ${(new Date().getTime - date1)/1000} seconds")

    val date2 = new Date().getTime
    result.take("my_sda_scores", 5000)
    info(s"5000 take sample of disk based sentiment analysis took: ${(new Date().getTime - date2)/1000} seconds")

    val dataFromMemory = readyData.persist(StorageLevel.MEMORY_AND_DISK)
    info(s"data in memory is of size: ${dataFromMemory.count}")
    val resultFromMemory = sentimentDetector.fit(dataFromMemory).transform(dataFromMemory)

    val date3 = new Date().getTime
    resultFromMemory.show
    info(s"20 show sample of memory based sentiment analysis took: ${(new Date().getTime - date3)/1000} seconds")

    val date4 = new Date().getTime
    resultFromMemory.take("my_sda_scores", 5000)
    info(s"5000 take sample of memory based sentiment analysis took: ${(new Date().getTime - date4)/1000} seconds")

    succeed
  }

}

class PragmaticSentimentTestSpec extends FlatSpec with PragmaticSentimentBehaviors {

  val sentimentSentenceTexts = "The staff of the restaurant is nice and the eggplant is bad " +
    "I recommend others to avoid because it is too expensive"

  val sentimentSentences = {
    new Tokenizer().fit(ContentProvider.parquetData).tag(Sentence.fromTexts(sentimentSentenceTexts)).toArray
  }

  "an isolated sentiment detector" should behave like isolatedSentimentDetector(sentimentSentences, -4.0)

  "a spark based sentiment detector" should behave like sparkBasedSentimentDetector(
    DataBuilder.basicDataBuild("The staff of the restaurant is nice and the eggplant is bad." +
      " I recommend others to avoid.")
  )

  "A SentimentDetector" should "be readable and writable" in {
    val sentimentDetector = new SentimentDetector().setDictionary(ExternalResource("src/test/resources/sentiment-corpus/default-sentiment-dict.txt", ReadAs.TEXT, Map("delimiter" -> ","))).fit(DataBuilder.basicDataBuild("dummy"))
    val path = "./test-output-tmp/sentimentdetector"
    try {
      sentimentDetector.write.overwrite.save(path)
      val sentimentDetectorRead = SentimentDetectorModel.read.load(path)
      assert(sentimentDetector.model.score(sentimentSentences) == sentimentDetectorRead.model.score(sentimentSentences))
    } catch {
      case _: java.io.IOException => succeed
    }
  }

} 
Example 124
Source File: DateMatcherBehaviors.scala    From spark-nlp   with Apache License 2.0 5 votes vote down vote up
package com.johnsnowlabs.nlp.annotators

import java.util.Date

import com.johnsnowlabs.nlp.{Annotation, AnnotatorBuilder}
import org.apache.spark.sql.{Dataset, Row}
import org.scalatest._
import org.scalatest.Matchers._
import com.johnsnowlabs.nlp.AnnotatorType.DATE
import scala.language.reflectiveCalls

trait DateMatcherBehaviors extends FlatSpec {
  def fixture(dataset: Dataset[Row]) = new {
    val df = AnnotatorBuilder.withDateMatcher(dataset)
    val dateAnnotations = df.select("date")
      .collect
      .flatMap { _.getSeq[Row](0) }
      .map { Annotation(_) }
  }

  def sparkBasedDateMatcher(dataset: => Dataset[Row]): Unit = {
    "A DateMatcher Annotator" should s"successfully parse dates" in {
      val f = fixture(dataset)
      f.dateAnnotations.foreach { a =>
        val d: String = a.result
        d should fullyMatch regex """\d+/\d+/\d+"""
      }
    }

    it should "create annotations" in {
      val f = fixture(dataset)
      assert(f.dateAnnotations.size > 0)
    }

    it should "create annotations with the correct type" in {
      val f = fixture(dataset)
      f.dateAnnotations.foreach { a =>
        assert(a.annotatorType == DATE)
      }
    }
  }
} 
Example 125
Source File: StatsCollector.scala    From akka-nbench   with Apache License 2.0 5 votes vote down vote up
package bench

import akka.actor._

import com.typesafe.config._

import java.util.Date
import java.io._

import Tapper._

class StatsCollector(csvdir: String, config: Config) extends Actor with ActorLogging  with Utils {

  val csvPath = pathJoin(csvdir, "raw.csv")
  val p = new PrintWriter(new BufferedWriter(new FileWriter(new File(csvPath))));
  var count = 0

  def receive = {
    case s: Stat =>
      p.println(s)
      count += 1
      if(count % 1000 == 0) {
        log.debug(s"processed $count")
      }
    case TearDown() =>
      log.info(s"total requests: $count")
      log.info(s"dumped csv to $csvPath")

      p.flush
      p.close

      val configPath = pathJoin(csvdir, "config.txt")
      using(new File(configPath)) { p =>
        p.println(config.toString)
      }

      sender ! OK()
  }
} 
Example 126
Source File: Bench.scala    From akka-nbench   with Apache License 2.0 5 votes vote down vote up
package bench

import akka.actor._
import akka.pattern.ask
import akka.util.Timeout

import scala.concurrent.duration._
import scala.reflect.runtime.universe._
import scala.concurrent.Await

import com.typesafe.config._
import net.ceedubs.ficus.Ficus._

import java.util.Properties
import java.nio.file._
import java.util.Date
import java.text.SimpleDateFormat

import Tapper._

object Bench extends App {

  def prepareOutputDirs(): String = {
    val csvDateTimeDir = FileSystems.getDefault().getPath(
      "tests/" + new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()))
    Files.createDirectories(csvDateTimeDir)
    val csvSymlink = FileSystems.getDefault().getPath("tests/current")
    if(Files.isSymbolicLink(csvSymlink)){
      Files.delete(csvSymlink)
    } else if (Files.exists(csvSymlink)) {
      throw new NotASymbolicLinkException(s"tests/current is not a symbolic link. Path: $csvSymlink")
    }
    Files.createSymbolicLink(csvSymlink, csvDateTimeDir.toAbsolutePath)
    csvDateTimeDir.toAbsolutePath.toString
  }

  def parseOptions(): String = {
     val usage = """
         Usage: activator -mem 4096 "run-main bench.Bench scenario_name"
     """
    if (args.length != 1) println(usage)
    return args(0)
  }

  val scenario = parseOptions
  val config = ConfigFactory.load().getConfig(scenario)
  val duration = config.getInt("duration")
  val concurrent = config.getInt("concurrent")
  val csvDateTimeDir = prepareOutputDirs

  val system = ActorSystem("bench")
  val actorProps = Props(classOf[StatsCollector], csvDateTimeDir, config)
  val statsCollector = system.actorOf(actorProps, name = "statscollector")

  val operationsWithRatio: Map[String, Int] = config.as[Map[String, Int]]("operations")
  val numer = operationsWithRatio.values.sum
  if (concurrent < numer){
    val msg = s"concurrent($concurrent) must be greater than or equal to the sum of operation ratios ($numer)"
    System.err.println(msg)
    throw new ApplicationConfigException(msg)
  }
  val operations = for((key, value) <- operationsWithRatio) yield {
    List.range(0, concurrent * operationsWithRatio(key) / numer).map(_ => key)
  }

  implicit val timeout = Timeout(duration * 2, SECONDS)
  var driverClz = Class.forName(config.getString("driver"))
  val drivers = operations.flatten.zipWithIndex.map{ case (operation, i) =>
    system.actorOf(Props(driverClz, operation, statsCollector, config).withDispatcher("my-dispatcher"), name = s"driver_$i")
  }

  drivers.par.map(actor => actor ? Ready()).foreach{ f =>
    Await.result(f, timeout.duration).asInstanceOf[OK]
  }

  val startAt = new Date()
  val doUntil = new Date(startAt.getTime + duration * 1000)
  drivers.par.map(actor => actor ? Go(doUntil)).foreach { f =>
    Await.result(f, timeout.duration).asInstanceOf[OK]
  }

  (statsCollector ? TearDown()).tap { f =>
    Await.result(f, timeout.duration).asInstanceOf[OK]
  }

  drivers.par.map(actor => actor ? TearDown()).foreach { f =>
    Await.result(f, timeout.duration).asInstanceOf[OK]
  }

  (drivers.head ? TearDown()).tap { f =>
    Await.result(f, timeout.duration).asInstanceOf[OK]
  }

  system.awaitTermination()
} 
Example 127
Source File: AmazonSQSPutDriver.scala    From akka-nbench   with Apache License 2.0 5 votes vote down vote up
package bench.drivers

import bench._
import akka.actor._

import com.typesafe.config._

import java.util.List;
import java.util.Map.Entry;
import java.util.Date
import scala.util.Random

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClient;
import com.amazonaws.services.sqs.AmazonSQSAsyncClient;
import com.amazonaws.services.sqs.model.CreateQueueRequest;
import com.amazonaws.services.sqs.model.DeleteMessageRequest;
import com.amazonaws.services.sqs.model.DeleteQueueRequest;
import com.amazonaws.services.sqs.model.Message;
import com.amazonaws.services.sqs.model.ReceiveMessageRequest;
import com.amazonaws.services.sqs.model.SendMessageRequest;

class AmazonSQSPutDriver(operation: String, stats: ActorRef, config: Config) extends
  Driver(operation, stats, config) {

  var sqs: AmazonSQS = _
  // var sqs: AmazonSQSAsyncClient = _
  var queueUrl: String = _
  val msgLen = config.getInt("message_length")

  override def setup(): Boolean = {
    val credentials = new ProfileCredentialsProvider().getCredentials();
    this.sqs = new AmazonSQSClient(credentials)
    //  this.sqs = new AmazonSQSAsyncClient(credentials)
    this.sqs.setRegion(Region.getRegion(Regions.AP_NORTHEAST_1))
    this.queueUrl = sqs.createQueue(new CreateQueueRequest("MyQueue")).getQueueUrl();
    true
  }

  override def run(): (Boolean, Long, Long) = {

    val start = System.currentTimeMillis

    try {
      val msg = start + Random.alphanumeric.take(msgLen).mkString
      this.sqs.sendMessage(new SendMessageRequest(this.queueUrl, msg))
      // this.sqs.sendAsyncMessage(new SendMessageRequest(this.queueUrl, "This is my message text."))
      val endAt = System.currentTimeMillis
      val elapsedMillis= endAt - start
      (true, endAt, elapsedMillis)
    } catch {
      case e: java.net.SocketTimeoutException => {
        log.error("Socket Timeout Exception has occurred. reconnecting...")
        setup()
        val endAt = System.currentTimeMillis
        val elapsedMillis = endAt - start
        (false, endAt, elapsedMillis)
      }
      case e: Throwable => {
        log.error("" + e)
        val endAt = System.currentTimeMillis
        val elapsedMillis = endAt - start
        (false, endAt, elapsedMillis)
      }
    }
  }
} 
Example 128
Source File: Driver.scala    From akka-nbench   with Apache License 2.0 5 votes vote down vote up
package bench.drivers

import bench._
import akka.actor._

import com.typesafe.config._

import java.util.Date

class Driver(operation: String, stats: ActorRef, config: Config) extends Actor with ActorLogging {

  val getOperation = () => {
    run _
  }

  def setup(): Boolean = {
    log.debug("setup")
    true
  }

  def run(): (Boolean, Long, Long) = {
    val start = System.currentTimeMillis
    Thread.sleep(1000)
    log.info("Dummy Driver")
    val endAt = System.currentTimeMillis
    val elapsedMillis= endAt - start
    (true, endAt, elapsedMillis)
  }

  def teardown(): Boolean = {
    log.debug("teardown")
    true
  }

  def teardownOnOnlyOneActor(): Boolean = {
    log.debug("teardownOnOnlyOneActor")
    true
  }

  def unlessTimeup(doUntil: Date): Boolean = { 
    new Date().compareTo(doUntil) < 1
  }

  def receive = {
    case Ready() =>
      setup()
      sender ! OK()

    case Go(doUntil) =>
      log.info(self.path.name + ": " + operation + " starting...")
      while(unlessTimeup(doUntil)) {
        val (ok, endAt, elapsedMillis) = getOperation()()
        this.stats ! Stat(endAt, elapsedMillis, operation, ok)
      }
      sender ! OK()

    case TearDown() =>
      teardown()
      sender ! OK()

    case TearDownOnOnlyOneActor() =>
      teardownOnOnlyOneActor()
      sender ! OK()

    case m =>
      throw new UnknownMessageException("Unknown Message: " + m)
  }
} 
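The Go(doUntil) handler above repeats the operation until unlessTimeup reports that the java.util.Date deadline has passed. The same deadline pattern in isolation (the five-second duration and the sleep are illustrative):

import java.util.Date

val doUntil = new Date(System.currentTimeMillis + 5000) // roughly five seconds from now
while (new Date().compareTo(doUntil) < 1) {
  // one unit of benchmark work would go here
  Thread.sleep(100)
}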
Example 129
Source File: Messages.scala    From akka-nbench   with Apache License 2.0 5 votes vote down vote up
package bench

import java.util.Date

case class OK()
case class Ready()
case class Go(doUntil: Date)
case class TearDown()
case class TearDownOnOnlyOneActor()

case class Result(ok: Boolean)
case class Stat(endAt: Long,  elapsedMillis: Long, operation: String, ok: Boolean) {
  override def toString: String = {
    s"$endAt,$elapsedMillis,$operation,$ok"
  }
} 
Example 130
Source File: TimeBasedDataService.scala    From kafka-jdbc-connector   with Apache License 2.0 5 votes vote down vote up
package com.agoda.kafka.connector.jdbc.services

import java.sql.{Connection, PreparedStatement, ResultSet, Timestamp}
import java.util.{Date, GregorianCalendar, TimeZone}

import com.agoda.kafka.connector.jdbc.JdbcSourceConnectorConstants
import com.agoda.kafka.connector.jdbc.models.DatabaseProduct
import com.agoda.kafka.connector.jdbc.models.DatabaseProduct.{MsSQL, MySQL}
import com.agoda.kafka.connector.jdbc.models.Mode.TimestampMode
import com.agoda.kafka.connector.jdbc.utils.DataConverter
import org.apache.kafka.connect.data.Schema
import org.apache.kafka.connect.source.SourceRecord

import scala.collection.JavaConverters._
import scala.collection.mutable.ListBuffer
import scala.util.Try


case class TimeBasedDataService(databaseProduct: DatabaseProduct,
                                storedProcedureName: String,
                                batchSize: Int,
                                batchSizeVariableName: String,
                                timestampVariableName: String,
                                var timestampOffset: Long,
                                timestampFieldName: String,
                                topic: String,
                                keyFieldOpt: Option[String],
                                dataConverter: DataConverter,
                                calendar: GregorianCalendar = new GregorianCalendar(TimeZone.getTimeZone("UTC"))
                               ) extends DataService {

  override def createPreparedStatement(connection: Connection): Try[PreparedStatement] = Try {
    val preparedStatement = databaseProduct match {
      case MsSQL => connection.prepareStatement(s"EXECUTE $storedProcedureName @$timestampVariableName = ?, @$batchSizeVariableName = ?")
      case MySQL => connection.prepareStatement(s"CALL $storedProcedureName (@$timestampVariableName := ?, @$batchSizeVariableName := ?)")
    }
    preparedStatement.setTimestamp(1, new Timestamp(timestampOffset), calendar)
    preparedStatement.setObject(2, batchSize)
    preparedStatement
  }

  override def extractRecords(resultSet: ResultSet, schema: Schema): Try[Seq[SourceRecord]] = Try {
    val sourceRecords = ListBuffer.empty[SourceRecord]
    var max = timestampOffset
    while (resultSet.next()) {
      dataConverter.convertRecord(schema, resultSet) map { record =>
        val time = record.get(timestampFieldName).asInstanceOf[Date].getTime
        max = if(time > max) {
          keyFieldOpt match {
            case Some(keyField) =>
              sourceRecords += new SourceRecord(
                Map(JdbcSourceConnectorConstants.STORED_PROCEDURE_NAME_KEY -> storedProcedureName).asJava,
                Map(TimestampMode.entryName -> time).asJava, topic, null, schema, record.get(keyField), schema, record
              )
            case None           =>
              sourceRecords += new SourceRecord(
                Map(JdbcSourceConnectorConstants.STORED_PROCEDURE_NAME_KEY -> storedProcedureName).asJava,
                Map(TimestampMode.entryName -> time).asJava, topic, schema, record
              )
          }
          time
        } else max
      }
    }
    timestampOffset = max
    sourceRecords
  }

  override def toString: String = {
    s"""
       |{
       |   "name" : "${this.getClass.getSimpleName}"
       |   "mode" : "${TimestampMode.entryName}"
       |   "stored-procedure.name" : "$storedProcedureName"
       |}
    """.stripMargin
  }
} 
Example 131
Source File: Utils.scala    From graphcool-framework   with Apache License 2.0 5 votes vote down vote up
package cool.graph.rabbit

import java.text.SimpleDateFormat
import java.util.{Date, UUID}
import java.util.concurrent.ThreadFactory
import java.util.concurrent.atomic.AtomicLong

object Utils {
  def timestamp: String = {
    val formatter = new SimpleDateFormat("HH:mm:ss.SSS-dd.MM.yyyy")
    val now       = new Date()
    formatter.format(now)
  }

  def timestampWithRandom: String = timestamp + "-" + UUID.randomUUID()

  def newNamedThreadFactory(name: String): ThreadFactory = new ThreadFactory {
    val count = new AtomicLong(0)

    override def newThread(runnable: Runnable): Thread = {
      val thread = new Thread(runnable)
      thread.setName(s"$name-" + count.getAndIncrement)
      thread.setDaemon(true)
      thread
    }
  }
} 
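A brief usage sketch of the helpers above (the thread name and runnable are illustrative):

println(Utils.timestamp)           // e.g. 14:03:22.512-01.06.2020
println(Utils.timestampWithRandom) // the same timestamp plus a random UUID

val factory = Utils.newNamedThreadFactory("rabbit-consumer")
val worker = factory.newThread(new Runnable {
  override def run(): Unit = println("running on " + Thread.currentThread().getName)
})
worker.start() // a daemon thread named rabbit-consumer-0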
Example 132
Source File: LDBCRouter.scala    From Raphtory   with Apache License 2.0 5 votes vote down vote up
package com.raphtory.examples.ldbc.routers

import java.text.SimpleDateFormat
import java.util.Date

import com.raphtory.core.components.Router.RouterWorker
import com.raphtory.core.model.communication.EdgeAdd
import com.raphtory.core.model.communication.EdgeDelete
import com.raphtory.core.model.communication.Type
import com.raphtory.core.model.communication.VertexAdd
import com.raphtory.core.model.communication.VertexDelete
import com.raphtory.examples.random.actors.RandomSpout

class LDBCRouter(override val routerId: Int,override val workerID:Int, override val initialManagerCount: Int) extends RouterWorker {
  override protected def parseTuple(value: Any): Unit = {

    val fileLine           = value.asInstanceOf[String].split("\\|")
    val date               = fileLine(1).substring(0, 10) + fileLine(1).substring(11, 23); //extract the day of the event
    val date2              = fileLine(2).substring(0, 10) + fileLine(1).substring(11, 23); //extract the day of the event
    val creationDate: Long = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss.SSS").parse(date).getTime()
    val deletionDate: Long = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss.SSS").parse(date2).getTime()
    val vertexDeletion = sys.env.getOrElse("LDBC_VERTEX_DELETION", "false").trim.toBoolean
    val edgeDeletion = sys.env.getOrElse("LDBC_EDGE_DELETION", "false").trim.toBoolean
    fileLine(0) match {
      case "person" =>
        sendGraphUpdate(VertexAdd(creationDate, assignID("person" + fileLine(3)), Type("person")))
        //sendGraphUpdate(VertexAdd(creationDate, fileLine(3).toLong,Type("person")))
        if(vertexDeletion)
          sendGraphUpdate(VertexDelete(deletionDate, assignID("person" + fileLine(3))))
      case "person_knows_person" =>
        //sendGraphUpdate(EdgeAdd(creationDate, fileLine(3).toLong,fileLine(4).toLong,Type("person_knows_person")))
        sendGraphUpdate(
                EdgeAdd(
                        creationDate,
                        assignID("person" + fileLine(3)),
                        assignID("person" + fileLine(4)),
                        Type("person_knows_person")
                )
        )
        if(edgeDeletion)
          sendGraphUpdate(EdgeDelete(deletionDate, assignID("person"+fileLine(3)),assignID("person"+fileLine(4))))
    }
  }
}
//2012-11-01T09:28:01.185+00:00|2019-07-22T11:24:24.362+00:00|35184372093644|Jose|Garcia|female|1988-05-20|111.68.47.44|Firefox 
Example 133
Source File: LDBCOldRouter.scala    From Raphtory   with Apache License 2.0 5 votes vote down vote up
package com.raphtory.examples.ldbc.routers

import java.text.SimpleDateFormat
import java.util.Date

import com.raphtory.core.components.Router.RouterWorker
import com.raphtory.core.model.communication.EdgeAdd
import com.raphtory.core.model.communication.EdgeDelete
import com.raphtory.core.model.communication.Type
import com.raphtory.core.model.communication.VertexAdd
import com.raphtory.core.model.communication.VertexDelete

class LDBCOldRouter(override val routerId: Int,override val workerID:Int, override val initialManagerCount: Int) extends RouterWorker {
  override protected def parseTuple(value: Any): Unit = {

    val fileLine = value.asInstanceOf[String].split("\\|")

    //val deletionDate:Long  = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss.SSS").parse(date2).getTime()
    fileLine(0) match {
      case "person" =>
        val date = fileLine(6).substring(0, 10) + fileLine(5).substring(11, 23); //extract the day of the event
        //val date2 = fileLine(2).substring(0, 10) + fileLine(1).substring(11, 23); //extract the day of the event
        val creationDate: Long = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss.SSS").parse(date).getTime()
        sendGraphUpdate(VertexAdd(creationDate, assignID("person" + fileLine(1)), Type("person")))
      //sendGraphUpdate(VertexAdd(creationDate, fileLine(3).toLong,Type("person")))
      //    sendGraphUpdate(VertexDelete(deletionDate, assignID("person"+fileLine(3))))
      case "person_knows_person" =>
        val date = fileLine(3).substring(0, 10) + fileLine(3).substring(11, 23); //extract the day of the event
        //val date2 = fileLine(2).substring(0, 10) + fileLine(1).substring(11, 23); //extract the day of the event
        val creationDate: Long = new SimpleDateFormat("yyyy-MM-ddHH:mm:ss.SSS").parse(date).getTime()
        //sendGraphUpdate(EdgeAdd(creationDate, fileLine(3).toLong,fileLine(4).toLong,Type("person_knows_person")))
        sendGraphUpdate(
                EdgeAdd(
                        creationDate,
                        assignID("person" + fileLine(1)),
                        assignID("person" + fileLine(2)),
                        Type("person_knows_person")
                )
        )
      //sendGraphUpdate(EdgeDelete(deletionDate, assignID("person"+fileLine(3)),assignID("person"+fileLine(4))))
    }
  }
}
//2012-11-01T09:28:01.185+00:00|2019-07-22T11:24:24.362+00:00|35184372093644|Jose|Garcia|female|1988-05-20|111.68.47.44|Firefox 
Example 134
Source File: OutDegree.scala    From Raphtory   with Apache License 2.0 5 votes vote down vote up
package com.raphtory.examples.random.depricated

import java.text.SimpleDateFormat
import java.util.Date

import com.raphtory.core.analysis.API.Analyser
import com.raphtory.core.utils.Utils

import scala.collection.mutable.ArrayBuffer

class OutDegree(args:Array[String]) extends Analyser(args){

  override def analyse(): Unit = {
    var results = ArrayBuffer[Int]()
    proxy.getVerticesSet().foreach { v =>
      val vertex     = proxy.getVertex(v._2)
      val totalEdges = vertex.getOutgoingNeighbors.size
      //  println("Total edges for V "+v+" "+vertex.getOutgoingNeighbors + " "+vertex.getIngoingNeighbors )
      results += totalEdges
    }
    // println("THIS IS HOW RESULTS LOOK: "+ results.groupBy(identity).mapValues(_.size))
    results.groupBy(identity).mapValues(_.size).toList
  }
  override def setup(): Unit = {}

  override def defineMaxSteps(): Int = 1

  override def processResults(results: ArrayBuffer[Any], timeStamp: Long, viewCompleteTime: Long): Unit = {}

  override def processViewResults(results: ArrayBuffer[Any], timestamp: Long, viewCompleteTime: Long): Unit = {
    val output_file = System.getenv().getOrDefault("GAB_PROJECT_OUTPUT", "/app/defout.csv").trim

    val inputFormat  = new SimpleDateFormat("E MMM dd HH:mm:ss z yyyy")
    val outputFormat = new SimpleDateFormat("dd/MM/yyyy HH:mm:ss")
    var finalResults = ArrayBuffer[(Int, Int)]()

    for (kv <- results)
      // println("KV RESULTS: " + kv)
      for (pair <- kv.asInstanceOf[List[(Int, Int)]])
        finalResults += pair

    val currentDate   = new Date(timestamp)
    val formattedDate = outputFormat.format(inputFormat.parse(currentDate.toString))
    var degrees       = finalResults.groupBy(_._1).mapValues(seq => seq.map(_._2).reduce(_ + _)).toList.sortBy(_._1) //.foreach(println)
    for ((degree, total) <- degrees) {
      var text = formattedDate + "," + degree + "," + total
      Utils.writeLines(output_file, text, "Date,OutDegree,Total")

    }
  }

  override def processWindowResults(
      results: ArrayBuffer[Any],
      timestamp: Long,
      windowSize: Long,
      viewCompleteTime: Long
  ): Unit = ???

  override def returnResults(): Any = ???
} 
Example 135
Source File: RangeAnalysisTask.scala    From Raphtory   with Apache License 2.0 5 votes vote down vote up
package com.raphtory.core.analysis.Tasks.RangeTasks

import java.util.Date

import akka.cluster.pubsub.DistributedPubSubMediator
import com.raphtory.core.analysis.API.Analyser
import com.raphtory.core.analysis.Tasks.AnalysisTask
import com.raphtory.core.model.communication.AnalyserPresentCheck
import com.raphtory.core.model.communication.AnalysisType
import com.raphtory.core.utils.Utils

class RangeAnalysisTask(managerCount:Int, jobID: String, args:Array[String],analyser: Analyser, start: Long, end: Long, jump: Long,newAnalyser:Boolean,rawFile:String)
        extends AnalysisTask(jobID: String,args, analyser,managerCount,newAnalyser,rawFile) {
  protected var currentTimestamp                            = start
  override def restartTime() = 0
  override protected def analysisType(): AnalysisType.Value = AnalysisType.range
  override def timestamp(): Long                            = currentTimestamp
  override def restart(): Unit = {
    if (currentTimestamp == end) {
      println(s"Range Analysis manager for $jobID between ${start} and ${end} finished")
      //killme()
    }
    else {
      currentTimestamp = currentTimestamp + jump

      if (currentTimestamp > end)
        currentTimestamp = end

      for (worker <- Utils.getAllReaders(managerCount))
        mediator ! DistributedPubSubMediator
          .Send(worker, AnalyserPresentCheck(this.generateAnalyzer.getClass.getName.replace("$", "")), false)
    }


  }

  override def processResults(timestamp: Long): Unit =
    analyser.processViewResults(result, this.timestamp(), viewCompleteTime)
} 
Example 136
Source File: ViewAnalysisTask.scala    From Raphtory   with Apache License 2.0 5 votes vote down vote up
package com.raphtory.core.analysis.Tasks.ViewTasks

import java.util.Date

import com.raphtory.core.analysis.API.Analyser
import com.raphtory.core.analysis.Tasks.AnalysisTask
import com.raphtory.core.model.communication.AnalysisType

class ViewAnalysisTask(managerCount:Int, jobID: String,args:Array[String], analyser: Analyser, time: Long,newAnalyser:Boolean,rawFile:String)
        extends AnalysisTask(jobID: String, args, analyser,managerCount,newAnalyser,rawFile) {
  override def timestamp(): Long = time

  override protected def analysisType(): AnalysisType.Value = AnalysisType.view

  override def restart(): Unit = {
    println(s"View Analysis manager for $jobID at ${time} finished")
    //killme()
  }

  override def processResults(timestamp: Long): Unit =
    analyser.processViewResults(result, this.timestamp(), viewCompleteTime)
} 
Example 137
Source File: S3MigrationHandlerBase.scala    From flyway-awslambda   with MIT License 5 votes vote down vote up
package crossroad0201.aws.flywaylambda

import java.text.SimpleDateFormat
import java.util.Date

import com.amazonaws.services.lambda.runtime.Context
import com.amazonaws.services.s3.AmazonS3
import crossroad0201.aws.flywaylambda.deploy.{FlywayDeployment, S3SourceFlywayDeployer}
import crossroad0201.aws.flywaylambda.migration.{FlywayMigrator, MigrationInfo, MigrationResult}
import spray.json.DefaultJsonProtocol

import scala.util.Try

object MigrationResultProtocol extends DefaultJsonProtocol {
  import spray.json._

  implicit val DateFormat = new RootJsonFormat[Date] {
    override def write(value: Date): JsValue = if (value == null) JsNull else JsString(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").format(value))
    override def read(json: JsValue): Date = ???
  }
  implicit val migrationInfoFormat = jsonFormat6(MigrationInfo.apply)
  implicit val migrationResultFormat = jsonFormat5(MigrationResult.apply)
}

trait S3MigrationHandlerBase extends FlywayMigrator {

  type ResultJson = String
  type ResultStoredPath = String

  protected def migrate(bucketName: String, prefix: String, flywayConfFileName: String = "flyway.conf")(implicit context: Context, s3Client: AmazonS3): Try[ResultJson] = {
    val logger = context.getLogger

    def resultJson(result: MigrationResult): ResultJson = {
      import MigrationResultProtocol._
      import spray.json._

      result.toJson.prettyPrint
    }

    def storeResult(deployment: FlywayDeployment, result: MigrationResult): ResultStoredPath = {
      val jsonPath = s"${deployment.sourcePrefix}/migration-result.json"
      s3Client.putObject(deployment.sourceBucket, jsonPath, resultJson(result))
      jsonPath
    }

    for {
      // Deploy Flyway resources.
      d <- new S3SourceFlywayDeployer(s3Client, bucketName, prefix, flywayConfFileName).deploy
      _ = {
        logger.log(
          s"""--- Flyway configuration ------------------------------------
             |flyway.url      = ${d.url}
             |flyway.user     = ****
             |flyway.password = ****
             |
             |SQL locations   = ${d.location}
             |SQL files       = ${d.sqlFiles.mkString(", ")}
             |-------------------------------------------------------------
              """.stripMargin)
      }

      // Migrate DB.
      r = migrate(d)
      _ = {
        logger.log(s"${r.message}!. ${r.appliedCount} applied.")
        r.infos.foreach { i =>
          logger.log(s"Version=${i.version}, Type=${i.`type`}, State=${i.state} InstalledAt=${i.installedAt} ExecutionTime=${i.execTime} Description=${i.description}")
        }
      }

      // Store migration result.
      storedPath = storeResult(d, r)
      _ = logger.log(s"Migration result stored to $bucketName/$storedPath.")

    } yield resultJson(r)
  }

} 
Example 138
Source File: MigrationResult.scala    From flyway-awslambda   with MIT License 5 votes vote down vote up
package crossroad0201.aws.flywaylambda.migration

import java.util.Date

import org.flywaydb.core.api.{MigrationInfo => FlywayMigrationInfo}

case class MigrationResult(
  last_status: String,
  rdsUrl: String,
  appliedCount: Int,
  message: String,
  infos: Seq[MigrationInfo])
object MigrationResult {
  def success(rdsUrl: String, appliedCount: Int, infos: Seq[MigrationInfo]): MigrationResult = {
    MigrationResult("SUCCESS", rdsUrl, appliedCount, "Migration success", infos)
  }
  def failure(rdsUrl: String, cause: Throwable, infos: Seq[MigrationInfo]): MigrationResult = {
    MigrationResult("FAILURE", rdsUrl, 0, s"Migration failed by ${cause.toString}", infos)
  }
}

case class MigrationInfo(
  version: String,
  `type`: String,
  installedAt: Date,
  state: String,
  execTime: Int,
  description: String)
object MigrationInfo {
  def apply(i : FlywayMigrationInfo): MigrationInfo = {
    MigrationInfo(i.getVersion.getVersion, i.getType.name, i.getInstalledOn, i.getState.name, i.getExecutionTime, i.getDescription)
  }
} 
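A small sketch tying this model to the MigrationResultProtocol from the previous example (spray-json on the classpath is assumed; all values are illustrative):

import java.util.Date
import spray.json._
import crossroad0201.aws.flywaylambda.MigrationResultProtocol._
import crossroad0201.aws.flywaylambda.migration.{MigrationInfo, MigrationResult}

val info   = MigrationInfo("1.0.1", "SQL", new Date(), "SUCCESS", 120, "create base tables")
val result = MigrationResult.success("jdbc:mysql://example-host/mydb", 1, Seq(info))
println(result.toJson.prettyPrint) // installedAt is rendered via the custom Date format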
Example 139
Source File: models.scala    From CSYE7200_Old   with MIT License 5 votes vote down vote up
package edu.neu.coe.csye7200.hedge_fund.rss

import java.net.URL
import scala.language.postfixOps
import java.util.Date

case class RssUrl(url: URL) {
  override def toString = "RSS: " + url.toString
}

trait RssFeed {
  val link: String
  val title: String
  val desc: String
  val items: Seq[RssItem]
  override def toString = title + "\n" + desc + "\n**"

  def latest = items sortWith ((a, b) => a.date.compareTo(b.date) > 0) head
}

case class AtomRssFeed(title: String, link: String, desc: String, items: Seq[RssItem]) extends RssFeed
case class XmlRssFeed(title: String, link: String, desc: String, language: String, items: Seq[RssItem]) extends RssFeed

case class RssItem(title: String, link: String, desc: String, date: Date, guid: String) {
  override def toString = date + " " + title
} 
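A quick usage sketch for the model above (titles and URLs are illustrative): latest sorts the items by their java.util.Date field and returns the most recent one.

import java.util.Date

val entries = Seq(
  RssItem("older post", "http://example.com/1", "first", new Date(0L), "guid-1"),
  RssItem("newer post", "http://example.com/2", "second", new Date(), "guid-2"))
val feed = AtomRssFeed("Example feed", "http://example.com", "demo", entries)
println(feed.latest) // prints the newer item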
Example 140
Source File: TheFlashTweetsProducerSpec.scala    From KafkaPlayground   with GNU General Public License v3.0 5 votes vote down vote up
package com.github.pedrovgs.kafkaplayground.flash

import java.util.Date

import com.danielasfregola.twitter4s.entities.{Geo, Tweet}
import com.github.pedrovgs.kafkaplayground.utils.EmbeddedKafkaServer
import org.scalatest.concurrent.{PatienceConfiguration, ScalaFutures}
import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers}

import scala.concurrent.duration._

object TheFlashTweetsProducerSpec {
  private val unknownLocationFlashTopic = "the-flash-tweets"
  private val locatedFlashTopic         = "the-flash-tweets-with-location"
  private val anyNotGeoLocatedTweet = Tweet(
    created_at = new Date(),
    id = 1L,
    id_str = "1",
    source = "source",
    text = "I've seen the fastest man alive!"
  )

  private val anyGeoLocatedTweet = anyNotGeoLocatedTweet.copy(
    geo = Some(Geo(Seq(12.0, 11.0), "lat-long"))
  )
}

class TheFlashTweetsProducerSpec
    extends FlatSpec
    with Matchers
    with EmbeddedKafkaServer
    with ScalaFutures
    with BeforeAndAfter {

  import TheFlashTweetsProducerSpec._

  "TheFlashTweetsProducer" should "return the tweet passed as param if the tweet has no geo location info" in {
    val result = produceTweet(anyNotGeoLocatedTweet)

    result shouldBe anyNotGeoLocatedTweet
  }

  it should "send a record with just the text of the tweet to the the-flash-tweets topic if the tweet has no geo location info" in {
    produceTweet(anyNotGeoLocatedTweet)

    val records = recordsForTopic(unknownLocationFlashTopic)

    val expectedMessage =
      s"""
         |{
         |  "message": "I've seen the fastest man alive!"
         |}
        """.stripMargin
    records.size shouldBe 1
    records.head shouldBe expectedMessage
  }

  it should "return the tweet passed as param if the tweet has geo location info" in {
    val result = produceTweet(anyGeoLocatedTweet)

    result shouldBe anyGeoLocatedTweet
  }

  it should "send a record with just the text of the tweet to the the-flash-tweets-with-location topic if the tweet has geo location info" in {
    produceTweet(anyGeoLocatedTweet)

    val records = recordsForTopic(locatedFlashTopic)

    val expectedMessage =
      s"""
         |{
         |  "latitude": 12.0,
         |  "longitude": 11.0,
         |  "id": "1",
         |  "message": "I've seen the fastest man alive!"
         |}
       """.stripMargin
    records.size shouldBe 1
    records.head shouldBe expectedMessage
  }

  it should "send a not geo-located tweet to a topic and another geo-located to the other topic configured" in {
    produceTweet(anyNotGeoLocatedTweet)
    produceTweet(anyGeoLocatedTweet)

    val locatedTopicRecords         = recordsForTopic(locatedFlashTopic)
    val unknownLocationTopicRecords = recordsForTopic(unknownLocationFlashTopic)

    locatedTopicRecords.size shouldBe 1
    unknownLocationTopicRecords.size shouldBe 1
  }

  private def produceTweet(tweet: Tweet) =
    new TheFlashTweetsProducer(kafkaServerAddress())(tweet)
      .futureValue(timeout = PatienceConfiguration.Timeout(1.seconds))

} 
Example 141
Source File: CliLogger.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.jnsaf.native_statistics

import java.io.{File, FileWriter, PrintWriter}
import java.text.SimpleDateFormat
import java.util.Date

 
object CliLogger {
  
  def timeStamp = new SimpleDateFormat("yyyyMMdd-HHmmss").format(new Date)
  
  def outPrint(s : String) {
    scala.Console.out.print(s)
    scala.Console.out.flush()
  }

  def outPrintln(s : String) {
    scala.Console.out.println(s)
    scala.Console.out.flush()
  }

  def outPrintln() {
    scala.Console.out.println()
    scala.Console.out.flush()
  }

  def errPrintln(s : String) {
    scala.Console.err.println(s)
    scala.Console.err.flush()
  }

  def errPrintln() {
    scala.Console.err.println()
    scala.Console.err.flush()
  }
  
  def logError(dir: File, text: String, e: Throwable) {
    outPrintln()
    errPrintln(text + e.getMessage)
    val f = new File(dir, ".errorlog")
    f.getParentFile.mkdirs
    val fw = new FileWriter(f)
    try {
      val pw = new PrintWriter(fw)
      pw.println("An error occurred on " + timeStamp)
      e.printStackTrace(pw)
      fw.close()
      outPrintln("Written: " + f.getAbsolutePath)
    } catch {
      case e : Throwable =>
        errPrintln("Error: " + e.getMessage)
    }
  }
} 
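A minimal usage sketch of the logger above (directory and exception are made up): the stack trace lands in ./.errorlog under the given directory and the path is echoed to stdout.

import java.io.File

CliLogger.outPrintln("Run started at " + CliLogger.timeStamp)
CliLogger.logError(new File("."), "Native statistics failed: ", new RuntimeException("boom"))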
Example 142
Source File: CliLogger.scala    From Argus-SAF   with Apache License 2.0 5 votes vote down vote up
package org.argus.saf.cli.util

import java.io.{File, FileWriter, PrintWriter}
import java.text.SimpleDateFormat
import java.util.Date

 
object CliLogger {
  
  def timeStamp: String = new SimpleDateFormat("yyyyMMdd-HHmmss").format(new Date)
  
  def outPrint(s : String) {
    scala.Console.out.print(s)
    scala.Console.out.flush()
  }

  def outPrintln(s : String) {
    scala.Console.out.println(s)
    scala.Console.out.flush()
  }

  def outPrintln() {
    scala.Console.out.println()
    scala.Console.out.flush()
  }

  def errPrintln(s : String) {
    scala.Console.err.println(s)
    scala.Console.err.flush()
  }

  def errPrintln() {
    scala.Console.err.println()
    scala.Console.err.flush()
  }
  
  def logError(dir: File, text: String, e: Throwable) {
    outPrintln()
    errPrintln(text + e.getMessage)
    val f = new File(dir, ".errorlog")
    f.getParentFile.mkdirs
    val fw = new FileWriter(f)
    try {
      val pw = new PrintWriter(fw)
      pw.println("An error occurred on " + timeStamp)
      e.printStackTrace(pw)
      fw.close()
      outPrintln("Written: " + f.getAbsolutePath)
    } catch {
      case e : Throwable =>
        errPrintln("Error: " + e.getMessage)
    }
  }
} 
Example 143
Source File: IotMessageConverter.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
// Copyright (c) Microsoft. All rights reserved.

package com.microsoft.azure.iot.kafka.connect.source

import java.time.Instant
import java.util.Date

import com.microsoft.azure.eventhubs.impl.AmqpConstants
import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct}

import scala.collection.JavaConverters._
import scala.reflect.ClassTag

object IotMessageConverter {

  val offsetKey = "offset"

  private val schemaName          = "iothub.kafka.connect"
  private val schemaVersion       = 1
  private val deviceIdKey         = "deviceId"
  private val contentTypeKey      = "contentType"
  private val sequenceNumberKey   = "sequenceNumber"
  private val enqueuedTimeKey     = "enqueuedTime"
  private val contentKey          = "content"
  private val systemPropertiesKey = "systemProperties"
  private val propertiesKey       = "properties"
  private val deviceIdIotHubKey   = "iothub-connection-device-id"

  // Public for testing purposes
  lazy val schema: Schema = SchemaBuilder.struct()
    .name(schemaName)
    .version(schemaVersion)
    .field(deviceIdKey, Schema.STRING_SCHEMA)
    .field(offsetKey, Schema.STRING_SCHEMA)
    .field(contentTypeKey, Schema.OPTIONAL_STRING_SCHEMA)
    .field(enqueuedTimeKey, Schema.STRING_SCHEMA)
    .field(sequenceNumberKey, Schema.INT64_SCHEMA)
    .field(contentKey, Schema.STRING_SCHEMA)
    .field(systemPropertiesKey, propertiesMapSchema)
    .field(propertiesKey, propertiesMapSchema)

  private lazy val propertiesMapSchema: Schema = SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.STRING_SCHEMA)

  def getIotMessageStruct(iotMessage: IotMessage): Struct = {

    val systemProperties = iotMessage.systemProperties
    val deviceId: String = getOrDefaultAndRemove(systemProperties, deviceIdIotHubKey, "")
    val offset: String = getOrDefaultAndRemove(systemProperties, AmqpConstants.OFFSET_ANNOTATION_NAME, "")
    val sequenceNumber: Long = getOrDefaultAndRemove(systemProperties, AmqpConstants.SEQUENCE_NUMBER_ANNOTATION_NAME, 0)
    val enqueuedTime: Option[Instant] = getEnqueuedTime(systemProperties)
    val enqueuedTimeStr = if(enqueuedTime.isDefined) enqueuedTime.get.toString else ""

    val properties = iotMessage.properties
    val contentType: String = getOrDefaultAndRemove(properties, contentTypeKey, "")

    val systemPropertiesMap = systemProperties.map(i => (i._1, i._2.toString))

    new Struct(schema)
      .put(deviceIdKey, deviceId)
      .put(offsetKey, offset)
      .put(contentTypeKey, contentType)
      .put(enqueuedTimeKey, enqueuedTimeStr)
      .put(sequenceNumberKey, sequenceNumber)
      .put(contentKey, iotMessage.content)
      .put(systemPropertiesKey, systemPropertiesMap.asJava)
      .put(propertiesKey, properties.asJava)
  }

  private def getEnqueuedTime(map: scala.collection.mutable.Map[String, Object]): Option[Instant] = {
    val enqueuedTimeValue: Date = getOrDefaultAndRemove(map, AmqpConstants.ENQUEUED_TIME_UTC_ANNOTATION_NAME, null)
    if (enqueuedTimeValue != null) Some(enqueuedTimeValue.toInstant) else None
  }

  private def getOrDefaultAndRemove[T: ClassTag, S: ClassTag](map: scala.collection.mutable.Map[String, S],
      key: String, defaultVal: T): T = {

    if (map.contains(key)) {
      val retVal: T = map(key).asInstanceOf[T]
      map.remove(key)
      retVal
    } else {
      defaultVal
    }
  }
} 
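A hedged sketch of populating the public schema by hand with made-up values, mirroring what getIotMessageStruct produces:

import org.apache.kafka.connect.data.Struct
import scala.collection.JavaConverters._

val handBuilt = new Struct(IotMessageConverter.schema)
  .put("deviceId", "device-42")
  .put("offset", "10")
  .put("contentType", "application/json")
  .put("enqueuedTime", java.time.Instant.now.toString)
  .put("sequenceNumber", 7L)
  .put("content", """{"temperature": 21}""")
  .put("systemProperties", Map.empty[String, String].asJava)
  .put("properties", Map.empty[String, String].asJava)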
Example 144
Source File: C2DMessageConverterTest.scala    From toketi-kafka-connect-iothub   with MIT License 5 votes vote down vote up
package com.microsoft.azure.iot.kafka.connect.sink

import java.time.Instant
import java.util.Date

import com.microsoft.azure.iot.kafka.connect.sink.testhelpers.{TestSchemas, TestSinkRecords}
import com.microsoft.azure.iot.kafka.connect.source.JsonSerialization
import org.apache.kafka.connect.errors.ConnectException
import org.scalatest.{FlatSpec, GivenWhenThen}

class C2DMessageConverterTest extends FlatSpec with GivenWhenThen with JsonSerialization {

  "C2DMessageConverter" should "validate the schema of a struct record against the expected schema" in {
    Given("A valid record schema")
    var schema = TestSchemas.validSchema
    When("ValidateSchema is called")
    Then("No exception is thrown")
    C2DMessageConverter.validateStructSchema(schema)

    Given("A valid record schema")
    schema = TestSchemas.validSchemaWithMissingOptionalField
    When("ValidateSchema is called")
    Then("No exception is thrown")
    C2DMessageConverter.validateStructSchema(schema)

    Given("A schema with an invalid type")
    schema = TestSchemas.invalidSchemaTypeSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }

    Given("A schema with an invalid field type")
    schema = TestSchemas.invalidFieldTypeSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }

    Given("A schema with a missing field")
    schema = TestSchemas.missingFieldSchema
    When("ValidateSchema is called")
    Then("A ConnectException is thrown")
    intercept[ConnectException] {
      C2DMessageConverter.validateStructSchema(schema)
    }
  }

  "C2DMessageConverter" should "deserialize sink records of String schema and return the C2D Message" in {
    Given("A valid record of string schema")
    var record = TestSinkRecords.getStringSchemaRecord()
    When("DeserializeMessage is called")
    var c2DMessage = C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    Then("A valid C2D message is obtained")
    assert(c2DMessage != null)
    assert(c2DMessage.deviceId == "device1")
    assert(c2DMessage.messageId == "message1")
    assert(c2DMessage.message == "Turn on")
    assert(c2DMessage.expiryTime.isDefined)
    assert(c2DMessage.expiryTime.get.after(Date.from(Instant.parse("2016-01-01T00:00:00Z"))))

    Given("A valid record of string schema")
    record = TestSinkRecords.getStringSchemaRecord2()
    When("DeserializeMessage is called")
    c2DMessage = C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    Then("A valid C2D message is obtained")
    assert(c2DMessage != null)
    assert(c2DMessage.deviceId == "device1")
    assert(c2DMessage.messageId == "message1")
    assert(c2DMessage.message == "Turn on")
    assert(c2DMessage.expiryTime.isEmpty)
  }

  "C2DMessageConverter" should "throw an exception if record with string schema has invalid data" in {
    Given("A record of string schema with invalid data")
    val record = TestSinkRecords.getInvalidScringSchemaRecord()
    When("DeserializeMessage is called")
    Then("Then a ConnectException is called")
    intercept[ConnectException] {
      C2DMessageConverter.deserializeMessage(record, record.valueSchema())
    }
  }
} 
Example 145
Source File: AccountRepository.scala    From frdomain-extras   with Apache License 2.0 5 votes vote down vote up
package frdomain.ch6.domain
package repository

import java.util.Date
import zio._

import common._
import model.{ Account, Balance }

object AccountRepository extends Serializable {
  trait Service extends Serializable {
    def query(no: String): Task[Option[Account]]
    def store(a: Account): Task[Account]
    def query(openedOn: Date): Task[Seq[Account]]
    def all: Task[Seq[Account]]
    def balance(no: String): Task[Option[Balance]]
  }
} 
Example 146
Source File: InMemoryAccountRepository.scala    From frdomain-extras   with Apache License 2.0 5 votes vote down vote up
package frdomain.ch6.domain
package repository

import java.util.Date
import zio._

import model.{ Account, Balance }
import common._

class InMemoryAccountRepository(ref: Ref[Map[String, Account]])  {
  val accountRepository = new AccountRepository.Service {

    override def all: Task[Seq[Account]] =
      ref.get.map(_.values.toList)

    override def query(no: String): Task[Option[Account]] =
      ref.get.map(_.get(no))

    override def query(openedOn: Date): Task[Seq[Account]] =
      ref.get.map(_.values.filter(_.dateOfOpen.getOrElse(today) == openedOn).toSeq)

    override def store(a: Account): Task[Account] =
      ref.update(m => m + (a.no -> a)).map(_ => a)

    override def balance(no: String): Task[Option[Balance]] =
      ref.get.map(_.get(no).map(_.balance)) 
  }
}

object InMemoryAccountRepository {

  val layer: ZLayer[Any, Throwable, AccountRepository] =
    ZLayer.fromEffect {
      for {
        ref <- Ref.make(Map.empty[String, Account])
      } yield new InMemoryAccountRepository(ref).accountRepository
    }
} 
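A hedged usage sketch of the layer above, assuming the conventional ZIO module alias (type AccountRepository = Has[AccountRepository.Service]) provided by common._:

// Hypothetical wiring: list all accounts against the in-memory repository.
val allAccounts: ZIO[AccountRepository, Throwable, Seq[Account]] =
  ZIO.accessM[AccountRepository](_.get.all)

val wired: Task[Seq[Account]] =
  allAccounts.provideLayer(InMemoryAccountRepository.layer)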
Example 147
Source File: MesosDriverDescription.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.mesos

import java.util.Date

import org.apache.spark.SparkConf
import org.apache.spark.deploy.Command
import org.apache.spark.scheduler.cluster.mesos.MesosClusterRetryState


private[spark] class MesosDriverDescription(
    val name: String,
    val jarUrl: String,
    val mem: Int,
    val cores: Double,
    val supervise: Boolean,
    val command: Command,
    schedulerProperties: Map[String, String],
    val submissionId: String,
    val submissionDate: Date,
    val retryState: Option[MesosClusterRetryState] = None)
  extends Serializable {

  val conf = new SparkConf(false)
  schedulerProperties.foreach {case (k, v) => conf.set(k, v)}

  def copy(
      name: String = name,
      jarUrl: String = jarUrl,
      mem: Int = mem,
      cores: Double = cores,
      supervise: Boolean = supervise,
      command: Command = command,
      schedulerProperties: SparkConf = conf,
      submissionId: String = submissionId,
      submissionDate: Date = submissionDate,
      retryState: Option[MesosClusterRetryState] = retryState): MesosDriverDescription = {

    new MesosDriverDescription(name, jarUrl, mem, cores, supervise, command, conf.getAll.toMap,
      submissionId, submissionDate, retryState)
  }

  override def toString: String = s"MesosDriverDescription (${command.mainClass})"
} 
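A hedged construction sketch (the class is private[spark], so this would live under an org.apache.spark package; all values are made up) showing that schedulerProperties end up in the embedded SparkConf:

import java.util.Date
import org.apache.spark.deploy.Command

val cmd  = new Command("org.example.Main", Seq("--arg"), Map(), Seq(), Seq(), Seq())
val desc = new MesosDriverDescription("example-job", "hdfs://jars/app.jar", 1024, 1.0, false, cmd,
  Map("spark.executor.memory" -> "2g"), "s-1", new Date())
// desc.conf.get("spark.executor.memory") == "2g"; retryState defaults to None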
Example 148
Source File: PMMLModelExport.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.{Date, Locale}

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application("Apache Spark MLlib").setVersion(version)
    val timestamp = new Timestamp()
      .addContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss", Locale.US).format(new Date()))
    val header = new Header()
      .setApplication(app)
      .setTimestamp(timestamp)
    new PMML("4.2", header, null)
  }
} 
Example 149
Source File: DriverInfo.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.master

import java.util.Date

import org.apache.spark.deploy.DriverDescription
import org.apache.spark.util.Utils

private[deploy] class DriverInfo(
    val startTime: Long,
    val id: String,
    val desc: DriverDescription,
    val submitDate: Date)
  extends Serializable {

  @transient var state: DriverState.Value = DriverState.SUBMITTED
  // Exception raised while launching the driver, if any; reset in init() below.
  @transient var exception: Option[Exception] = None

  @transient var worker: Option[WorkerInfo] = None

  init()

  private def readObject(in: java.io.ObjectInputStream): Unit = Utils.tryOrIOException {
    in.defaultReadObject()
    init()
  }

  private def init(): Unit = {
    state = DriverState.SUBMITTED
    worker = None
    exception = None
  }
} 
Example 150
Source File: AllStagesResourceSuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.status.api.v1

import java.util.Date

import scala.collection.mutable.LinkedHashMap

import org.apache.spark.SparkFunSuite
import org.apache.spark.scheduler.{StageInfo, TaskInfo, TaskLocality}
import org.apache.spark.ui.jobs.UIData.{StageUIData, TaskUIData}

class AllStagesResourceSuite extends SparkFunSuite {

  def getFirstTaskLaunchTime(taskLaunchTimes: Seq[Long]): Option[Date] = {
    val tasks = new LinkedHashMap[Long, TaskUIData]
    taskLaunchTimes.zipWithIndex.foreach { case (time, idx) =>
      tasks(idx.toLong) = TaskUIData(
        new TaskInfo(idx, idx, 1, time, "", "", TaskLocality.ANY, false), None)
    }

    val stageUiData = new StageUIData()
    stageUiData.taskData = tasks
    val status = StageStatus.ACTIVE
    val stageInfo = new StageInfo(
      1, 1, "stage 1", 10, Seq.empty, Seq.empty, "details abc")
    val stageData = AllStagesResource.stageUiToStageData(status, stageInfo, stageUiData, false)

    stageData.firstTaskLaunchedTime
  }

  test("firstTaskLaunchedTime when there are no tasks") {
    val result = getFirstTaskLaunchTime(Seq())
    assert(result == None)
  }

  test("firstTaskLaunchedTime when there are tasks but none launched") {
    val result = getFirstTaskLaunchTime(Seq(-100L, -200L, -300L))
    assert(result == None)
  }

  test("firstTaskLaunchedTime when there are tasks and some launched") {
    val result = getFirstTaskLaunchTime(Seq(-100L, 1449255596000L, 1449255597000L))
    assert(result == Some(new Date(1449255596000L)))
  }

} 
Example 151
Source File: MasterWebUISuite.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.master.ui

import java.io.DataOutputStream
import java.net.{HttpURLConnection, URL}
import java.nio.charset.StandardCharsets
import java.util.Date

import scala.collection.mutable.HashMap

import org.mockito.Mockito.{mock, times, verify, when}
import org.scalatest.BeforeAndAfterAll

import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.deploy.DeployMessages.{KillDriverResponse, RequestKillDriver}
import org.apache.spark.deploy.DeployTestUtils._
import org.apache.spark.deploy.master._
import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv}


class MasterWebUISuite extends SparkFunSuite with BeforeAndAfterAll {

  val conf = new SparkConf
  val securityMgr = new SecurityManager(conf)
  val rpcEnv = mock(classOf[RpcEnv])
  val master = mock(classOf[Master])
  val masterEndpointRef = mock(classOf[RpcEndpointRef])
  when(master.securityMgr).thenReturn(securityMgr)
  when(master.conf).thenReturn(conf)
  when(master.rpcEnv).thenReturn(rpcEnv)
  when(master.self).thenReturn(masterEndpointRef)
  val masterWebUI = new MasterWebUI(master, 0)

  override def beforeAll() {
    super.beforeAll()
    masterWebUI.bind()
  }

  override def afterAll() {
    masterWebUI.stop()
    super.afterAll()
  }

  test("kill application") {
    val appDesc = createAppDesc()
    // use new start date so it isn't filtered by UI
    val activeApp = new ApplicationInfo(
      new Date().getTime, "app-0", appDesc, new Date(), null, Int.MaxValue)

    when(master.idToApp).thenReturn(HashMap[String, ApplicationInfo]((activeApp.id, activeApp)))

    val url = s"http://localhost:${masterWebUI.boundPort}/app/kill/"
    val body = convPostDataToString(Map(("id", activeApp.id), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify the master was called to remove the active app
    verify(master, times(1)).removeApplication(activeApp, ApplicationState.KILLED)
  }

  test("kill driver") {
    val activeDriverId = "driver-0"
    val url = s"http://localhost:${masterWebUI.boundPort}/driver/kill/"
    val body = convPostDataToString(Map(("id", activeDriverId), ("terminate", "true")))
    val conn = sendHttpRequest(url, "POST", body)
    conn.getResponseCode

    // Verify that master was asked to kill driver with the correct id
    verify(masterEndpointRef, times(1)).ask[KillDriverResponse](RequestKillDriver(activeDriverId))
  }

  private def convPostDataToString(data: Map[String, String]): String = {
    (for ((name, value) <- data) yield s"$name=$value").mkString("&")
  }

  
  private def sendHttpRequest(
      url: String,
      method: String,
      body: String = ""): HttpURLConnection = {
    val conn = new URL(url).openConnection().asInstanceOf[HttpURLConnection]
    conn.setRequestMethod(method)
    if (body.nonEmpty) {
      conn.setDoOutput(true)
      conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded")
      conn.setRequestProperty("Content-Length", Integer.toString(body.length))
      val out = new DataOutputStream(conn.getOutputStream)
      out.write(body.getBytes(StandardCharsets.UTF_8))
      out.close()
    }
    conn
  }
} 
Example 152
Source File: DeployTestUtils.scala    From multi-tenancy-spark   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy

import java.io.File
import java.util.Date

import org.apache.spark.{SecurityManager, SparkConf}
import org.apache.spark.deploy.master.{ApplicationInfo, DriverInfo, WorkerInfo}
import org.apache.spark.deploy.worker.{DriverRunner, ExecutorRunner}

private[deploy] object DeployTestUtils {
  def createAppDesc(): ApplicationDescription = {
    val cmd = new Command("mainClass", List("arg1", "arg2"), Map(), Seq(), Seq(), Seq())
    new ApplicationDescription("name", Some(4), 1234, cmd, "appUiUrl")
  }

  def createAppInfo() : ApplicationInfo = {
    val appDesc = createAppDesc()
    val appInfo = new ApplicationInfo(JsonConstants.appInfoStartTime,
      "id", appDesc, JsonConstants.submitDate, null, Int.MaxValue)
    appInfo.endTime = JsonConstants.currTimeInMillis
    appInfo
  }

  def createDriverCommand(): Command = new Command(
    "org.apache.spark.FakeClass", Seq("some arg --and-some options -g foo"),
    Map(("K1", "V1"), ("K2", "V2")), Seq("cp1", "cp2"), Seq("lp1", "lp2"), Seq("-Dfoo")
  )

  def createDriverDesc(): DriverDescription =
    new DriverDescription("hdfs://some-dir/some.jar", 100, 3, false, createDriverCommand())

  def createDriverInfo(): DriverInfo = new DriverInfo(3, "driver-3",
    createDriverDesc(), new Date())

  def createWorkerInfo(): WorkerInfo = {
    val workerInfo = new WorkerInfo("id", "host", 8080, 4, 1234, null, "http://publicAddress:80")
    workerInfo.lastHeartbeat = JsonConstants.currTimeInMillis
    workerInfo
  }

  def createExecutorRunner(execId: Int): ExecutorRunner = {
    new ExecutorRunner(
      "appId",
      execId,
      createAppDesc(),
      4,
      1234,
      null,
      "workerId",
      "host",
      123,
      "publicAddress",
      new File("sparkHome"),
      new File("workDir"),
      "spark://worker",
      new SparkConf,
      Seq("localDir"),
      ExecutorState.RUNNING)
  }

  def createDriverRunner(driverId: String): DriverRunner = {
    val conf = new SparkConf()
    new DriverRunner(
      conf,
      driverId,
      new File("workDir"),
      new File("sparkHome"),
      createDriverDesc(),
      null,
      "spark://worker",
      new SecurityManager(conf))
  }
} 
Example 153
Source File: AppAnalyzer.scala    From sparklens   with Apache License 2.0 5 votes vote down vote up
package com.qubole.sparklens.analyzer

import java.util.Date
import java.util.concurrent.TimeUnit

import com.qubole.sparklens.common.AppContext

import scala.collection.mutable.ListBuffer

trait AppAnalyzer {
  // Entry point implemented by each concrete analyzer; signature inferred from the call in startAnalyzers below.
  def analyze(appContext: AppContext): String

  def pd(millis: Long) : String = {
    "%02dm %02ds".format(
      TimeUnit.MILLISECONDS.toMinutes(millis),
      TimeUnit.MILLISECONDS.toSeconds(millis) -
        TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(millis))
    )
  }

  def pcm(millis: Long) : String = {
    val millisForMinutes = millis % (60*60*1000)

    "%02dh %02dm".format(
      TimeUnit.MILLISECONDS.toHours(millis),
      TimeUnit.MILLISECONDS.toMinutes(millisForMinutes))
  }

  implicit class PrintlnStringBuilder(sb: StringBuilder) {
    def println(x: Any): StringBuilder = {
      sb.append(x).append("\n")
    }
    def print(x: Any): StringBuilder = {
      sb.append(x)
    }
  }
}

object AppAnalyzer {
  def startAnalyzers(appContext: AppContext): Unit = {
    val list = new ListBuffer[AppAnalyzer]
    list += new SimpleAppAnalyzer
    list += new HostTimelineAnalyzer
    list += new ExecutorTimelineAnalyzer
    list += new AppTimelineAnalyzer
    list += new JobOverlapAnalyzer
    list += new EfficiencyStatisticsAnalyzer
    list += new ExecutorWallclockAnalyzer
    list += new StageSkewAnalyzer


    list.foreach( x => {
      try {
        val output = x.analyze(appContext)
        println(output)
      } catch {
        case e:Throwable => {
          println(s"Failed in Analyzer ${x.getClass.getSimpleName}")
          e.printStackTrace()
        }
      }
    })
  }

} 
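For reference, worked values for the two duration formatters above:

// pd(83000L)    == "01m 23s"  (83 s => 1 min with a 23 s remainder)
// pcm(3723000L) == "01h 02m"  (1 h 2 min 3 s; the minutes are computed from millis % 1 hour)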
Example 154
Source File: PMMLModelExport.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = new PMML

  setHeader(pmml)

  private def setHeader(pmml: PMML): Unit = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application().withName("Apache Spark MLlib").withVersion(version)
    val timestamp = new Timestamp()
      .withContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .withApplication(app)
      .withTimestamp(timestamp)
    pmml.setHeader(header)
  }
} 
Example 155
Source File: MesosDriverDescription.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.mesos

import java.util.Date

import org.apache.spark.deploy.Command
import org.apache.spark.scheduler.cluster.mesos.MesosClusterRetryState


private[spark] class MesosDriverDescription(
    val name: String,
    val jarUrl: String,
    val mem: Int,
    val cores: Double,
    val supervise: Boolean,
    val command: Command,
    val schedulerProperties: Map[String, String],
    val submissionId: String,
    val submissionDate: Date,
    val retryState: Option[MesosClusterRetryState] = None)
  extends Serializable {

  def copy(
      name: String = name,
      jarUrl: String = jarUrl,
      mem: Int = mem,
      cores: Double = cores,
      supervise: Boolean = supervise,
      command: Command = command,
      schedulerProperties: Map[String, String] = schedulerProperties,
      submissionId: String = submissionId,
      submissionDate: Date = submissionDate,
      retryState: Option[MesosClusterRetryState] = retryState): MesosDriverDescription = {
    new MesosDriverDescription(name, jarUrl, mem, cores, supervise, command, schedulerProperties,
      submissionId, submissionDate, retryState)
  }

  override def toString: String = s"MesosDriverDescription (${command.mainClass})"
} 
Example 156
Source File: DriverInfo.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.master

import java.util.Date

import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.deploy.DriverDescription
import org.apache.spark.util.Utils

private[deploy] class DriverInfo(
    val startTime: Long,
    val id: String,
    val desc: DriverDescription,
    val submitDate: Date)
  extends Serializable {

  @transient var state: DriverState.Value = DriverState.SUBMITTED
  // Exception raised while launching the driver, if any; reset in init() below.
  @transient var exception: Option[Exception] = None

  @transient var worker: Option[WorkerInfo] = None

  init()

  private def readObject(in: java.io.ObjectInputStream): Unit = Utils.tryOrIOException {
    in.defaultReadObject()
    init()
  }

  private def init(): Unit = {
    state = DriverState.SUBMITTED
    worker = None
    exception = None
  }
} 
Example 157
Source File: MesosClusterSchedulerSuite.scala    From iolap   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.scheduler.mesos

import java.util.Date

import org.scalatest.mock.MockitoSugar

import org.apache.spark.deploy.Command
import org.apache.spark.deploy.mesos.MesosDriverDescription
import org.apache.spark.scheduler.cluster.mesos._
import org.apache.spark.{LocalSparkContext, SparkConf, SparkFunSuite}


class MesosClusterSchedulerSuite extends SparkFunSuite with LocalSparkContext with MockitoSugar {

  private val command = new Command("mainClass", Seq("arg"), null, null, null, null)

  test("can queue drivers") {
    val conf = new SparkConf()
    conf.setMaster("mesos://localhost:5050")
    conf.setAppName("spark mesos")
    val scheduler = new MesosClusterScheduler(
      new BlackHoleMesosClusterPersistenceEngineFactory, conf) {
      override def start(): Unit = { ready = true }
    }
    scheduler.start()
    val response = scheduler.submitDriver(
        new MesosDriverDescription("d1", "jar", 1000, 1, true,
          command, Map[String, String](), "s1", new Date()))
    assert(response.success)
    val response2 =
      scheduler.submitDriver(new MesosDriverDescription(
        "d1", "jar", 1000, 1, true, command, Map[String, String](), "s2", new Date()))
    assert(response2.success)
    val state = scheduler.getSchedulerState()
    val queuedDrivers = state.queuedDrivers.toList
    assert(queuedDrivers(0).submissionId == response.submissionId)
    assert(queuedDrivers(1).submissionId == response2.submissionId)
  }

  test("can kill queued drivers") {
    val conf = new SparkConf()
    conf.setMaster("mesos://localhost:5050")
    conf.setAppName("spark mesos")
    val scheduler = new MesosClusterScheduler(
      new BlackHoleMesosClusterPersistenceEngineFactory, conf) {
      override def start(): Unit = { ready = true }
    }
    scheduler.start()
    val response = scheduler.submitDriver(
        new MesosDriverDescription("d1", "jar", 1000, 1, true,
          command, Map[String, String](), "s1", new Date()))
    assert(response.success)
    val killResponse = scheduler.killDriver(response.submissionId)
    assert(killResponse.success)
    val state = scheduler.getSchedulerState()
    assert(state.queuedDrivers.isEmpty)
  }
} 
Example 158
Source File: FinaglePostgresEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.finagle.postgres

import com.twitter.finagle.postgres._
import com.twitter.finagle.postgres.values._
import com.twitter.finagle.postgres.values.ValueEncoder._
import io.getquill.FinaglePostgresContext
import java.util.{ Date, UUID }
import java.time._

trait FinaglePostgresEncoders {
  this: FinaglePostgresContext[_] =>

  type Encoder[T] = FinaglePostgresEncoder[T]

  case class FinaglePostgresEncoder[T](encoder: ValueEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      row :+ Param(value)(encoder)
  }

  def encoder[T](implicit e: ValueEncoder[T]): Encoder[T] = FinaglePostgresEncoder(e)

  def encoder[T, U](f: U => T)(implicit e: ValueEncoder[T]): Encoder[U] =
    encoder[U](e.contraMap(f))

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    FinaglePostgresEncoder(e.encoder.contraMap(mapped.f))

  implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] =
    FinaglePostgresEncoder[Option[T]](option(e.encoder))

  implicit val stringEncoder: Encoder[String] = encoder[String]
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean]
  implicit val byteEncoder: Encoder[Byte] = encoder[Short, Byte](_.toShort)
  implicit val shortEncoder: Encoder[Short] = encoder[Short]
  implicit val intEncoder: Encoder[Int] = encoder[Int]
  implicit val longEncoder: Encoder[Long] = encoder[Long]
  implicit val floatEncoder: Encoder[Float] = encoder[Float]
  implicit val doubleEncoder: Encoder[Double] = encoder[Double]
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](bytea)
  implicit val dateEncoder: Encoder[Date] =
    encoder[LocalDateTime, Date]((v: Date) => LocalDateTime.ofInstant(v.toInstant, ZoneId.systemDefault()))
  implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate]
  implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder[LocalDateTime]
  implicit val uuidEncoder: Encoder[UUID] = encoder[UUID]
} 
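Custom wrapper types can piggyback on these encoders through quill's MappedEncoding; a hedged sketch (the InstalledAt wrapper is made up), written against a context that mixes in this trait:

// Hypothetical wrapper type, encoded by mapping it onto the existing dateEncoder.
case class InstalledAt(value: java.util.Date)

implicit val installedAtToDate: MappedEncoding[InstalledAt, java.util.Date] =
  MappedEncoding[InstalledAt, java.util.Date](_.value)
// mappedEncoder above then derives an Encoder[InstalledAt] from dateEncoder.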
Example 159
Source File: FinaglePostgresDecoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.finagle.postgres

import java.nio.charset.Charset
import java.time.{ LocalDate, LocalDateTime, ZoneId }
import java.util.{ Date, UUID }

import com.twitter.finagle.postgres.values.ValueDecoder
import com.twitter.util.Return
import com.twitter.util.Throw
import com.twitter.util.Try
import io.getquill.FinaglePostgresContext
import io.getquill.util.Messages.fail
import io.netty.buffer.ByteBuf

trait FinaglePostgresDecoders {
  this: FinaglePostgresContext[_] =>

  import ValueDecoder._

  type Decoder[T] = FinaglePostgresDecoder[T]

  case class FinaglePostgresDecoder[T](
    vd:      ValueDecoder[T],
    default: Throwable => T  = (e: Throwable) => fail(e.getMessage)
  ) extends BaseDecoder[T] {
    override def apply(index: Index, row: ResultRow): T =
      row.getTry[T](index)(vd) match {
        case Return(r) => r
        case Throw(e)  => default(e)
      }

    def orElse[U](f: U => T)(implicit vdu: ValueDecoder[U]): FinaglePostgresDecoder[T] = {
      val mappedVd = vdu.map[T](f)
      FinaglePostgresDecoder[T](
        new ValueDecoder[T] {
          def decodeText(recv: String, text: String): Try[T] = {
            val t = vd.decodeText(recv, text)
            if (t.isReturn) t
            else mappedVd.decodeText(recv, text)
          }
          def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[T] = {
            val t = vd.decodeBinary(recv, bytes, charset)
            if (t.isReturn) t
            else mappedVd.decodeBinary(recv, bytes, charset)
          }
        }
      )
    }
  }

  implicit def decoderDirectly[T](implicit vd: ValueDecoder[T]): Decoder[T] = FinaglePostgresDecoder(vd)
  def decoderMapped[U, T](f: U => T)(implicit vd: ValueDecoder[U]): Decoder[T] = FinaglePostgresDecoder(vd.map[T](f))

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
    FinaglePostgresDecoder[Option[T]](
      new ValueDecoder[Option[T]] {
        def decodeText(recv: String, text: String): Try[Option[T]] = Return(d.vd.decodeText(recv, text).toOption)
        def decodeBinary(recv: String, bytes: ByteBuf, charset: Charset): Try[Option[T]] = Return(d.vd.decodeBinary(recv, bytes, charset).toOption)
      },
      _ => None
    )

  implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] =
    decoderMapped[I, O](mapped.f)(d.vd)

  implicit val stringDecoder: Decoder[String] = decoderDirectly[String]
  implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoderDirectly[BigDecimal]
  implicit val booleanDecoder: Decoder[Boolean] = decoderDirectly[Boolean]
  implicit val shortDecoder: Decoder[Short] = decoderDirectly[Short]
  implicit val byteDecoder: Decoder[Byte] = decoderMapped[Short, Byte](_.toByte)
  implicit val intDecoder: Decoder[Int] = decoderDirectly[Int].orElse[Long](_.toInt)
  implicit val longDecoder: Decoder[Long] = decoderDirectly[Long].orElse[Int](_.toLong)
  implicit val floatDecoder: Decoder[Float] = decoderDirectly[Float].orElse[Double](_.toFloat)
  implicit val doubleDecoder: Decoder[Double] = decoderDirectly[Double]
  implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoderDirectly[Array[Byte]]
  implicit val dateDecoder: Decoder[Date] = decoderMapped[LocalDateTime, Date](d => Date.from(d.atZone(ZoneId.systemDefault()).toInstant))
  implicit val localDateDecoder: Decoder[LocalDate] = decoderDirectly[LocalDate].orElse[LocalDateTime](_.toLocalDate)
  implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoderDirectly[LocalDateTime].orElse[LocalDate](_.atStartOfDay)
  implicit val uuidDecoder: Decoder[UUID] = decoderDirectly[UUID]
} 
Example 160
Source File: ArrayEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.async

import java.time.LocalDate
import java.util.Date

import io.getquill.PostgresAsyncContext
import io.getquill.context.sql.encoding.ArrayEncoding
import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

trait ArrayEncoders extends ArrayEncoding {
  self: PostgresAsyncContext[_] =>

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col]
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayRawEncoder[BigDecimal, Col]
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col]
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col]
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col]
  implicit def arrayIntEncoder[Col <: Seq[Index]]: Encoder[Col] = arrayRawEncoder[Index, Col]
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col]
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col]
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col]
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col]
  implicit def arrayJodaDateTimeEncoder[Col <: Seq[JodaDateTime]]: Encoder[Col] = arrayEncoder[JodaDateTime, Col](_.toLocalDateTime)
  implicit def arrayJodaLocalDateTimeEncoder[Col <: Seq[JodaLocalDateTime]]: Encoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col]
  implicit def arrayJodaLocalDateEncoder[Col <: Seq[JodaLocalDate]]: Encoder[Col] = arrayRawEncoder[JodaLocalDate, Col]
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](encodeLocalDate.f)

  def arrayEncoder[T, Col <: Seq[T]](mapper: T => Any): Encoder[Col] =
    encoder[Col]((col: Col) => col.toIndexedSeq.map(mapper), SqlTypes.ARRAY)

  def arrayRawEncoder[T, Col <: Seq[T]]: Encoder[Col] = arrayEncoder[T, Col](identity)
} 
Example 161
Source File: ArrayDecoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.async

import java.time.LocalDate
import java.util.Date

import io.getquill.PostgresAsyncContext
import io.getquill.context.sql.encoding.ArrayEncoding
import io.getquill.util.Messages.fail
import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

import scala.collection.compat._
import scala.reflect.ClassTag

trait ArrayDecoders extends ArrayEncoding {
  self: PostgresAsyncContext[_] =>

  implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawEncoder[String, Col]
  implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayRawEncoder[BigDecimal, Col]
  implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawEncoder[Boolean, Col]
  implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayDecoder[Short, Byte, Col](_.toByte)
  implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawEncoder[Short, Col]
  implicit def arrayIntDecoder[Col <: Seq[Index]](implicit bf: CBF[Index, Col]): Decoder[Col] = arrayRawEncoder[Index, Col]
  implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawEncoder[Long, Col]
  implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayDecoder[Double, Float, Col](_.toFloat)
  implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawEncoder[Double, Col]
  implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, Date, Col](_.toDate)
  implicit def arrayJodaDateTimeDecoder[Col <: Seq[JodaDateTime]](implicit bf: CBF[JodaDateTime, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, JodaDateTime, Col](_.toDateTime)
  implicit def arrayJodaLocalDateTimeDecoder[Col <: Seq[JodaLocalDateTime]](implicit bf: CBF[JodaLocalDateTime, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col]
  implicit def arrayJodaLocalDateDecoder[Col <: Seq[JodaLocalDate]](implicit bf: CBF[JodaLocalDate, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDate, Col]
  implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[JodaLocalDate, LocalDate, Col](decodeLocalDate.f)

  def arrayDecoder[I, O, Col <: Seq[O]](mapper: I => O)(implicit bf: CBF[O, Col], iTag: ClassTag[I], oTag: ClassTag[O]): Decoder[Col] =
    AsyncDecoder[Col](SqlTypes.ARRAY)(new BaseDecoder[Col] {
      def apply(index: Index, row: ResultRow): Col = {
        row(index) match {
          case seq: IndexedSeq[Any] => seq.foldLeft(bf.newBuilder) {
            case (b, x: I) => b += mapper(x)
            case (_, x)    => fail(s"Array at index $index contains element of ${x.getClass.getCanonicalName}, but expected $iTag")
          }.result()
          case value => fail(
            s"Value '$value' at index $index is not an array so it cannot be decoded to collection of $oTag"
          )
        }
      }
    })

  def arrayRawEncoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] =
    arrayDecoder[T, T, Col](identity)
} 
Example 162
Source File: PostgresDecoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.ndbc

import java.time._
import java.util.{ Date, UUID }

import scala.collection.compat._
import scala.language.implicitConversions
import scala.math.BigDecimal.javaBigDecimal2bigDecimal

import io.getquill.context.sql.encoding.ArrayEncoding
import io.trane.ndbc.PostgresRow
import io.trane.ndbc.value.Value

class Default[+T](val default: T)

object Default {
  implicit def defaultNull[T <: AnyRef]: Default[T] = new Default[T](null.asInstanceOf[T])
  implicit def defaultNumeric[T <: Numeric[_]](n: T) = new Default[T](0.asInstanceOf[T])
  implicit object DefaultBoolean extends Default[Boolean](false)

  def value[A](implicit value: Default[A]): A = value.default
}

trait PostgresDecoders {
  this: NdbcContext[_, _, _, PostgresRow] with ArrayEncoding =>

  type Decoder[T] = BaseDecoder[T]

  protected val zoneOffset: ZoneOffset

  def decoder[T, U](f: PostgresRow => Int => T)(implicit map: T => U): Decoder[U] =
    (index, row) =>
      row.column(index) match {
        case Value.NULL => Default.value[U]
        case _          => map(f(row)(index))
      }

  def arrayDecoder[T, U, Col <: Seq[U]](f: PostgresRow => Int => Array[T])(implicit map: T => U, bf: CBF[U, Col]): Decoder[Col] =
    (index, row) => {
      f(row)(index).foldLeft(bf.newBuilder) {
        case (b, v) => b += map(v)
      }.result()
    }

  implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] =
    mappedBaseDecoder(mapped, d)

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
    (idx, row) =>
      row.column(idx) match {
        case Value.NULL => None
        case value      => Option(d(idx, row))
      }

  private implicit def toDate(v: LocalDateTime): Date = Date.from(v.toInstant(zoneOffset))

  implicit val uuidDecoder: Decoder[UUID] = decoder(_.getUUID)
  implicit val stringDecoder: Decoder[String] = decoder(_.getString)
  implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder(_.getBigDecimal)
  implicit val booleanDecoder: Decoder[Boolean] = decoder(_.getBoolean)
  implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte)
  implicit val shortDecoder: Decoder[Short] = decoder(_.getShort)
  implicit val intDecoder: Decoder[Int] = decoder(_.getInteger)
  implicit val longDecoder: Decoder[Long] = decoder(_.getLong)
  implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat)
  implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble)
  implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder(_.getByteArray)
  implicit val dateDecoder: Decoder[Date] = decoder(_.getLocalDateTime)
  implicit val localDateDecoder: Decoder[LocalDate] = decoder(_.getLocalDate)
  implicit val localDateTimeDecoder: Decoder[LocalDateTime] = decoder(_.getLocalDateTime)
  implicit val offsetTimeDecoder: Decoder[OffsetTime] = decoder(_.getOffsetTime)

  implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayDecoder[String, String, Col](_.getStringArray)
  implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayDecoder[java.math.BigDecimal, BigDecimal, Col](_.getBigDecimalArray)
  implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayDecoder[java.lang.Boolean, Boolean, Col](_.getBooleanArray)
  implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayDecoder[Byte, Byte, Col](_.getByteArray)
  implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayDecoder[java.lang.Short, Short, Col](_.getShortArray)
  implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = arrayDecoder[java.lang.Integer, Int, Col](_.getIntegerArray)
  implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayDecoder[java.lang.Long, Long, Col](_.getLongArray)
  implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayDecoder[java.lang.Float, Float, Col](_.getFloatArray)
  implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayDecoder[java.lang.Double, Double, Col](_.getDoubleArray)
  implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayDecoder[LocalDateTime, Date, Col](_.getLocalDateTimeArray)
  implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[LocalDate, LocalDate, Col](_.getLocalDateArray)
} 
Example 163
Source File: PostgresEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.ndbc

import java.time.{ LocalDate, LocalDateTime, ZoneOffset }
import java.util.{ Date, UUID }

import io.getquill.dsl.CoreDsl
import io.trane.ndbc.PostgresPreparedStatement

import scala.language.implicitConversions
import scala.reflect.ClassTag

trait LowPriorityPostgresImplicits {
  this: CoreDsl =>

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: BaseEncoder[O]): BaseEncoder[I] =
    mappedBaseEncoder(mapped, e)
}

trait PostgresEncoders extends LowPriorityPostgresImplicits with io.getquill.dsl.LowPriorityImplicits {
  this: NdbcContext[_, _, PostgresPreparedStatement, _] =>

  type Encoder[T] = BaseEncoder[T]

  protected val zoneOffset: ZoneOffset

  def encoder[T, U](f: PostgresPreparedStatement => (Int, U) => PostgresPreparedStatement)(implicit ev: T => U): Encoder[T] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else f(ps)(idx, v)

  def arrayEncoder[T, U: ClassTag, Col <: Seq[T]](f: PostgresPreparedStatement => (Int, Array[U]) => PostgresPreparedStatement)(ev: T => U): Encoder[Col] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else f(ps)(idx, v.map(ev).toArray[U])

  implicit override def anyValMappedEncoder[I <: AnyVal, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] = mappedEncoder

  implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] =
    (idx, v, ps) =>
      if (v == null) ps.setNull(idx)
      else v match {
        case None    => ps.setNull(idx)
        case Some(v) => e(idx, v, ps)
      }

  implicit def toLocalDateTime(d: Date) = LocalDateTime.ofInstant(d.toInstant(), zoneOffset)

  implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUUID)
  implicit val stringEncoder: Encoder[String] = encoder(_.setString)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder(_.setBigDecimal)(_.bigDecimal)
  implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBoolean)
  implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte)
  implicit val shortEncoder: Encoder[Short] = encoder(_.setShort)
  implicit val intEncoder: Encoder[Int] = encoder(_.setInteger)
  implicit val longEncoder: Encoder[Long] = encoder(_.setLong)
  implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat)
  implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder(_.setByteArray)
  implicit val dateEncoder: Encoder[Date] = encoder(_.setLocalDateTime)
  implicit val localDateEncoder: Encoder[LocalDate] = encoder(_.setLocalDate)
  implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder(_.setLocalDateTime)

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayEncoder[String, String, Col](_.setStringArray)(identity)
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, java.math.BigDecimal, Col](_.setBigDecimalArray)(_.bigDecimal)
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayEncoder[Boolean, java.lang.Boolean, Col](_.setBooleanArray)(_.booleanValue)
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayEncoder[Byte, java.lang.Short, Col](_.setShortArray)(identity)
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayEncoder[Short, java.lang.Short, Col](_.setShortArray)(_.shortValue)
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayEncoder[Int, java.lang.Integer, Col](_.setIntegerArray)(_.intValue)
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayEncoder[Long, java.lang.Long, Col](_.setLongArray)(_.longValue)
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayEncoder[Float, java.lang.Float, Col](_.setFloatArray)(_.floatValue)
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayEncoder[Double, java.lang.Double, Col](_.setDoubleArray)(_.doubleValue)
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayEncoder[Date, LocalDateTime, Col](_.setLocalDateTimeArray)(identity)
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, LocalDate, Col](_.setLocalDateArray)(identity)
} 
Example 164
Source File: Encoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.jasync

import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime }
import java.util.Date

import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

trait Encoders {
  this: JAsyncContext[_, _, _] =>

  type Encoder[T] = AsyncEncoder[T]

  type EncoderSqlType = SqlTypes.SqlTypes

  case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T])
    extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder.apply(index, value, row)
  }

  def encoder[T](sqlType: DecoderSqlType): Encoder[T] =
    encoder(identity[T], sqlType)

  def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] =
    AsyncEncoder[T](sqlType)(new BaseEncoder[T] {
      def apply(index: Index, value: T, row: PrepareRow) =
        row :+ f(value)
    })

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    AsyncEncoder(e.sqlType)(new BaseEncoder[I] {
      def apply(index: Index, value: I, row: PrepareRow) =
        e(index, mapped.f(value), row)
    })

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] {
      def apply(index: Index, value: Option[T], row: PrepareRow) = {
        value match {
          case None    => nullEncoder(index, null, row)
          case Some(v) => d(index, v, row)
        }
      }
    })

  private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL)

  implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]((bd: BigDecimal) => bd.bigDecimal, SqlTypes.REAL)
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN)
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT)
  implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT)
  implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER)
  implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT)
  implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT)
  implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY)
  implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP)
  implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE)
  implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP)
  implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP)

  implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] =
    MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId)))

  implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] =
    MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId)))

  implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] =
    MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))

  implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] =
    MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond))

  implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] =
    MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli))

  implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder)
} 
Example 165
Source File: CassandraMirrorContext.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill

import java.util.Date

import com.datastax.driver.core.LocalDate
import io.getquill.context.cassandra.encoding.{ CassandraMapper, CassandraType }
import io.getquill.context.cassandra.{ CassandraContext, CqlIdiom, Udt }

import scala.reflect.ClassTag

class CassandraMirrorContextWithQueryProbing extends CassandraMirrorContext(Literal) with QueryProbing

class CassandraMirrorContext[Naming <: NamingStrategy](naming: Naming)
  extends MirrorContext[CqlIdiom, Naming](CqlIdiom, naming) with CassandraContext[Naming] {

  implicit val timestampDecoder: Decoder[Date] = decoder[Date]
  implicit val timestampEncoder: Encoder[Date] = encoder[Date]
  implicit val cassandraLocalDateDecoder: Decoder[LocalDate] = decoder[LocalDate]
  implicit val cassandraLocalDateEncoder: Encoder[LocalDate] = encoder[LocalDate]

  implicit def listDecoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[Cas, T]): Decoder[List[T]] = decoderUnsafe[List[T]]
  implicit def setDecoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[Cas, T]): Decoder[Set[T]] = decoderUnsafe[Set[T]]
  implicit def mapDecoder[K, V, KCas: ClassTag, VCas: ClassTag](
    implicit
    keyMapper: CassandraMapper[KCas, K],
    valMapper: CassandraMapper[VCas, V]
  ): Decoder[Map[K, V]] = decoderUnsafe[Map[K, V]]

  implicit def listEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas]): Encoder[List[T]] = encoder[List[T]]
  implicit def setEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas]): Encoder[Set[T]] = encoder[Set[T]]
  implicit def mapEncoder[K, V, KCas, VCas](
    implicit
    keyMapper: CassandraMapper[K, KCas],
    valMapper: CassandraMapper[V, VCas]
  ): Encoder[Map[K, V]] = encoder[Map[K, V]]

  implicit def udtCassandraType[T <: Udt]: CassandraType[T] = CassandraType.of[T]
  implicit def udtDecoder[T <: Udt: ClassTag]: Decoder[T] = decoder[T]
  implicit def udtEncoder[T <: Udt]: Encoder[T] = encoder[T]
} 
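A hedged usage sketch: the mirror context only records the generated statement and the lifted values, so the Date encoder can be observed without a running Cassandra cluster. Event and its fields are hypothetical, and the call shape follows the specs further down this page (assuming the mirror's usual ActionMirror shape with a .string field):

import java.util.Date
import io.getquill._

object CassandraMirrorDateUsage extends App {
  val ctx = new CassandraMirrorContext(Literal)
  import ctx._

  case class Event(id: Int, createdAt: Date)

  // timestampEncoder handles the lifted Date; the mirror exposes the generated CQL.
  val mirror = ctx.run(query[Event].insert(lift(Event(1, new Date))))
  println(mirror.string)
}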
Example 166
Source File: CassandraContext.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra

import java.util.{ Date, UUID }

import com.datastax.driver.core.LocalDate
import io.getquill.NamingStrategy
import io.getquill.context.Context
import io.getquill.context.cassandra.encoding.{ CassandraMapper, Encodings }

import scala.reflect.ClassTag

trait CassandraContext[N <: NamingStrategy]
  extends Context[CqlIdiom, N]
  with Encodings
  with UdtMetaDsl
  with Ops {

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]]
  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]]

  implicit val stringDecoder: Decoder[String]
  implicit val bigDecimalDecoder: Decoder[BigDecimal]
  implicit val booleanDecoder: Decoder[Boolean]
  implicit val byteDecoder: Decoder[Byte]
  implicit val shortDecoder: Decoder[Short]
  implicit val intDecoder: Decoder[Int]
  implicit val longDecoder: Decoder[Long]
  implicit val floatDecoder: Decoder[Float]
  implicit val doubleDecoder: Decoder[Double]
  implicit val byteArrayDecoder: Decoder[Array[Byte]]
  implicit val uuidDecoder: Decoder[UUID]
  implicit val timestampDecoder: Decoder[Date]
  implicit val cassandraLocalDateDecoder: Decoder[LocalDate]

  implicit val stringEncoder: Encoder[String]
  implicit val bigDecimalEncoder: Encoder[BigDecimal]
  implicit val booleanEncoder: Encoder[Boolean]
  implicit val byteEncoder: Encoder[Byte]
  implicit val shortEncoder: Encoder[Short]
  implicit val intEncoder: Encoder[Int]
  implicit val longEncoder: Encoder[Long]
  implicit val floatEncoder: Encoder[Float]
  implicit val doubleEncoder: Encoder[Double]
  implicit val byteArrayEncoder: Encoder[Array[Byte]]
  implicit val uuidEncoder: Encoder[UUID]
  implicit val timestampEncoder: Encoder[Date]
  implicit val cassandraLocalDateEncoder: Encoder[LocalDate]

  implicit def listDecoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[Cas, T]): Decoder[List[T]]
  implicit def setDecoder[T, Cas: ClassTag](implicit mapper: CassandraMapper[Cas, T]): Decoder[Set[T]]
  implicit def mapDecoder[K, V, KCas: ClassTag, VCas: ClassTag](
    implicit
    keyMapper: CassandraMapper[KCas, K],
    valMapper: CassandraMapper[VCas, V]
  ): Decoder[Map[K, V]]

  implicit def listEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas]): Encoder[List[T]]
  implicit def setEncoder[T, Cas](implicit mapper: CassandraMapper[T, Cas]): Encoder[Set[T]]
  implicit def mapEncoder[K, V, KCas, VCas](
    implicit
    keyMapper: CassandraMapper[K, KCas],
    valMapper: CassandraMapper[V, VCas]
  ): Encoder[Map[K, V]]
} 
Example 167
Source File: Decoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra.encoding

import java.util.{ Date, UUID }

import com.datastax.driver.core.LocalDate
import io.getquill.context.cassandra.CassandraSessionContext
import io.getquill.util.Messages.fail

trait Decoders extends CollectionDecoders {
  this: CassandraSessionContext[_] =>

  type Decoder[T] = CassandraDecoder[T]

  case class CassandraDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] {
    override def apply(index: Index, row: ResultRow) =
      decoder(index, row)
  }

  def decoder[T](d: BaseDecoder[T]): Decoder[T] = CassandraDecoder(
    (index, row) =>
      if (row.isNull(index) && !row.getColumnDefinitions.getType(index).isCollection)
        fail(s"Expected column at index $index to be defined but it was empty")
      else d(index, row)
  )

  def decoder[T](f: ResultRow => Index => T): Decoder[T] =
    decoder((index, row) => f(row)(index))

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
    CassandraDecoder((index, row) => {
      row.isNull(index) match {
        case true  => None
        case false => Some(d(index, row))
      }
    })

  implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], decoder: Decoder[I]): Decoder[O] =
    CassandraDecoder(mappedBaseDecoder(mapped, decoder.decoder))

  implicit val stringDecoder: Decoder[String] = decoder(_.getString)
  implicit val bigDecimalDecoder: Decoder[BigDecimal] =
    decoder((index, row) => row.getDecimal(index))
  implicit val booleanDecoder: Decoder[Boolean] = decoder(_.getBool)
  implicit val byteDecoder: Decoder[Byte] = decoder(_.getByte)
  implicit val shortDecoder: Decoder[Short] = decoder(_.getShort)
  implicit val intDecoder: Decoder[Int] = decoder(_.getInt)
  implicit val longDecoder: Decoder[Long] = decoder(_.getLong)
  implicit val floatDecoder: Decoder[Float] = decoder(_.getFloat)
  implicit val doubleDecoder: Decoder[Double] = decoder(_.getDouble)
  implicit val byteArrayDecoder: Decoder[Array[Byte]] =
    decoder((index, row) => {
      val bb = row.getBytes(index)
      val b = new Array[Byte](bb.remaining())
      bb.get(b)
      b
    })
  implicit val uuidDecoder: Decoder[UUID] = decoder(_.getUUID)
  implicit val timestampDecoder: Decoder[Date] = decoder(_.getTimestamp)
  implicit val cassandraLocalDateDecoder: Decoder[LocalDate] = decoder(_.getDate)
} 
Example 168
Source File: Encoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra.encoding

import java.nio.ByteBuffer
import java.util.{ Date, UUID }

import com.datastax.driver.core.LocalDate
import io.getquill.context.cassandra.CassandraSessionContext

trait Encoders extends CollectionEncoders {
  this: CassandraSessionContext[_] =>

  type Encoder[T] = CassandraEncoder[T]

  case class CassandraEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder(index, value, row)
  }

  def encoder[T](e: BaseEncoder[T]): Encoder[T] = CassandraEncoder(e)

  def encoder[T](f: PrepareRow => (Index, T) => PrepareRow): Encoder[T] =
    encoder((index, value, row) => f(row)(index, value))

  private[this] val nullEncoder: Encoder[Null] =
    encoder((index, value, row) => row.setToNull(index))

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    encoder { (index, value, row) =>
      value match {
        case None    => nullEncoder(index, null, row)
        case Some(v) => d(index, v, row)
      }
    }

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] =
    CassandraEncoder(mappedBaseEncoder(mapped, encoder.encoder))

  implicit val stringEncoder: Encoder[String] = encoder(_.setString)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] =
    encoder((index, value, row) => row.setDecimal(index, value.bigDecimal))
  implicit val booleanEncoder: Encoder[Boolean] = encoder(_.setBool)
  implicit val byteEncoder: Encoder[Byte] = encoder(_.setByte)
  implicit val shortEncoder: Encoder[Short] = encoder(_.setShort)
  implicit val intEncoder: Encoder[Int] = encoder(_.setInt)
  implicit val longEncoder: Encoder[Long] = encoder(_.setLong)
  implicit val floatEncoder: Encoder[Float] = encoder(_.setFloat)
  implicit val doubleEncoder: Encoder[Double] = encoder(_.setDouble)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] =
    encoder((index, value, row) => row.setBytes(index, ByteBuffer.wrap(value)))
  implicit val uuidEncoder: Encoder[UUID] = encoder(_.setUUID)
  implicit val timestampEncoder: Encoder[Date] = encoder(_.setTimestamp)
  implicit val cassandraLocalDateEncoder: Encoder[LocalDate] = encoder(_.setDate)
} 
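mappedEncoder above is what lets application types piggyback on these base encoders. A minimal sketch with a hypothetical UserId wrapper stored as a Cassandra text column (the wrapper and its encodings are illustrative, not part of the source):

import io.getquill.MappedEncoding

case class UserId(value: String)

implicit val userIdEncode: MappedEncoding[UserId, String] = MappedEncoding(_.value)
implicit val userIdDecode: MappedEncoding[String, UserId] = MappedEncoding(UserId.apply)
// With these in scope inside a concrete Cassandra context, mappedEncoder/mappedDecoder
// derive Encoder[UserId] and Decoder[UserId] from stringEncoder/stringDecoder.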
Example 169
Source File: Encodings.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra.encoding

import java.time.{ Instant, LocalDate, ZonedDateTime, ZoneId }
import java.util.Date

import com.datastax.driver.core.{ LocalDate => CasLocalDate }
import io.getquill.context.cassandra.CassandraContext

trait Encodings extends CassandraMapperConversions with CassandraTypes {
  this: CassandraContext[_] =>

  protected val zoneId = ZoneId.systemDefault

  implicit val encodeJava8LocalDate: MappedEncoding[LocalDate, CasLocalDate] = MappedEncoding(ld =>
    CasLocalDate.fromYearMonthDay(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))
  implicit val decodeJava8LocalDate: MappedEncoding[CasLocalDate, LocalDate] = MappedEncoding(ld =>
    LocalDate.of(ld.getYear, ld.getMonth, ld.getDay))

  implicit val encodeJava8Instant: MappedEncoding[Instant, Date] = MappedEncoding(Date.from)
  implicit val decodeJava8Instant: MappedEncoding[Date, Instant] = MappedEncoding(_.toInstant)

  implicit val encodeJava8ZonedDateTime: MappedEncoding[ZonedDateTime, Date] = MappedEncoding(zdt =>
    Date.from(zdt.toInstant))
  implicit val decodeJava8ZonedDateTime: MappedEncoding[Date, ZonedDateTime] = MappedEncoding(d =>
    ZonedDateTime.ofInstant(d.toInstant, zoneId))
} 
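One practical detail: these mappings go through java.util.Date, which only carries millisecond precision, so the sub-millisecond part of an Instant is dropped on the way in. A small illustrative check (not from the source):

import java.time.Instant
import java.util.Date

val now = Instant.now()
val asDate = Date.from(now)          // what encodeJava8Instant applies
val roundTripped = asDate.toInstant  // what decodeJava8Instant applies
// Millisecond precision survives; nanosecond precision does not.
assert(roundTripped.toEpochMilli == now.toEpochMilli)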
Example 170
Source File: SetsEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra

import java.util.{ Date, UUID }

import com.datastax.driver.core.LocalDate

class SetsEncodingSpec extends CollectionsSpec {
  val ctx = testSyncDB
  import ctx._

  case class SetsEntity(
    id:         Int,
    texts:      Set[String],
    decimals:   Set[BigDecimal],
    bools:      Set[Boolean],
    ints:       Set[Int],
    longs:      Set[Long],
    floats:     Set[Float],
    doubles:    Set[Double],
    dates:      Set[LocalDate],
    timestamps: Set[Date],
    uuids:      Set[UUID]
  )
  val e = SetsEntity(1, Set("c"), Set(BigDecimal(1.33)), Set(true), Set(1, 2), Set(2, 3), Set(1f, 3f),
    Set(5d), Set(LocalDate.fromMillisSinceEpoch(System.currentTimeMillis())),
    Set(new Date), Set(UUID.randomUUID()))
  val q = quote(query[SetsEntity])

  "Set encoders/decoders" in {
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Empty sets and optional fields" in {
    case class Entity(id: Int, texts: Option[Set[String]], bools: Option[Set[Boolean]], ints: Set[Int])
    val e = Entity(1, Some(Set("1", "2")), None, Set())
    val q = quote(querySchema[Entity]("SetsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Mapped encoding for CassandraType" in {
    case class StrEntity(id: Int, texts: Set[StrWrap])
    val e = StrEntity(1, Set("1", "2").map(StrWrap.apply))
    val q = quote(querySchema[StrEntity]("SetsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Mapped encoding for CassandraMapper types" in {
    case class IntEntity(id: Int, ints: Set[IntWrap])
    val e = IntEntity(1, Set(1, 2).map(IntWrap.apply))
    val q = quote(querySchema[IntEntity]("SetsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Blob (Array[Byte]) support" in {
    case class BlobsEntity(id: Int, blobs: Set[Array[Byte]])
    val e = BlobsEntity(1, Set(Array(1.toByte, 2.toByte), Array(2.toByte)))
    val q = quote(querySchema[BlobsEntity]("SetsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1))
      .head.blobs.map(_.toSet) mustBe e.blobs.map(_.toSet)
  }

  "Set in where clause" in {
    val e = SetFrozen(Set(1, 2))
    ctx.run(setFroz.insert(lift(e)))
    ctx.run(setFroz.filter(_.id == lift(Set(1, 2)))) mustBe List(e)
    ctx.run(setFroz.filter(_.id == lift(Set(1)))) mustBe List()
  }

  override protected def beforeEach(): Unit = {
    ctx.run(q.delete)
    ctx.run(setFroz.delete)
  }
} 
Example 171
Source File: ListsEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra

import java.util.{ Date, UUID }

import com.datastax.driver.core.LocalDate

class ListsEncodingSpec extends CollectionsSpec {
  val ctx = testSyncDB
  import ctx._

  case class ListsEntity(
    id:         Int,
    texts:      List[String],
    decimals:   List[BigDecimal],
    bools:      List[Boolean],
    bytes:      List[Byte],
    shorts:     List[Short],
    ints:       List[Int],
    longs:      List[Long],
    floats:     List[Float],
    doubles:    List[Double],
    dates:      List[LocalDate],
    timestamps: List[Date],
    uuids:      List[UUID]
  )
  val e = ListsEntity(1, List("c"), List(BigDecimal(1.33)), List(true), List(0, 1), List(3, 2), List(1, 2), List(2, 3),
    List(1f, 3f), List(5d), List(LocalDate.fromMillisSinceEpoch(System.currentTimeMillis())),
    List(new Date), List(UUID.randomUUID()))
  val q = quote(query[ListsEntity])

  "List encoders/decoders for CassandraTypes and CassandraMappers" in {
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Empty lists and optional fields" in {
    case class Entity(id: Int, texts: Option[List[String]], bools: Option[List[Boolean]], ints: List[Int])
    val e = Entity(1, Some(List("1", "2")), None, Nil)
    val q = quote(querySchema[Entity]("ListsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Mapped encoding for CassandraType" in {
    case class StrEntity(id: Int, texts: List[StrWrap])
    val e = StrEntity(1, List("1", "2").map(StrWrap.apply))
    val q = quote(querySchema[StrEntity]("ListsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Mapped encoding for CassandraMapper types" in {
    case class IntEntity(id: Int, ints: List[IntWrap])
    val e = IntEntity(1, List(1, 2).map(IntWrap.apply))
    val q = quote(querySchema[IntEntity]("ListsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Blob (Array[Byte]) support" in {
    case class BlobsEntity(id: Int, blobs: List[Array[Byte]])
    val e = BlobsEntity(1, List(Array(1.toByte, 2.toByte), Array(2.toByte)))
    val q = quote(querySchema[BlobsEntity]("ListsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1))
      .head.blobs.map(_.toList) mustBe e.blobs.map(_.toList)
  }

  "List in where clause / contains" in {
    val e = ListFrozen(List(1, 2))
    ctx.run(listFroz.insert(lift(e)))
    ctx.run(listFroz.filter(_.id == lift(List(1, 2)))) mustBe List(e)
    ctx.run(listFroz.filter(_.id == lift(List(1)))) mustBe Nil

    ctx.run(listFroz.filter(_.id.contains(2)).allowFiltering) mustBe List(e)
    ctx.run(listFroz.filter(_.id.contains(3)).allowFiltering) mustBe Nil
  }

  override protected def beforeEach(): Unit = {
    ctx.run(q.delete)
    ctx.run(listFroz.delete)
  }
} 
Example 172
Source File: MapsEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.cassandra

import java.util.{ Date, UUID }

import com.datastax.driver.core.LocalDate

class MapsEncodingSpec extends CollectionsSpec {
  val ctx = testSyncDB
  import ctx._

  case class MapsEntity(
    id:            Int,
    textDecimal:   Map[String, BigDecimal],
    intDouble:     Map[Int, Double],
    longFloat:     Map[Long, Float],
    boolDate:      Map[Boolean, LocalDate],
    uuidTimestamp: Map[UUID, Date]
  )
  val e = MapsEntity(1, Map("1" -> BigDecimal(1)), Map(1 -> 1d, 2 -> 2d, 3 -> 3d), Map(1L -> 3f),
    Map(true -> LocalDate.fromMillisSinceEpoch(System.currentTimeMillis())),
    Map(UUID.randomUUID() -> new Date))
  val q = quote(query[MapsEntity])

  "Map encoders/decoders" in {
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Empty maps and optional fields" in {
    case class Entity(
      id:          Int,
      textDecimal: Option[Map[String, BigDecimal]],
      intDouble:   Option[Map[Int, Double]],
      longFloat:   Map[Long, Float]
    )
    val e = Entity(1, Some(Map("1" -> BigDecimal(1))), None, Map())
    val q = quote(querySchema[Entity]("MapsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Mapped encoding for CassandraType" in {
    case class StrEntity(id: Int, textDecimal: Map[StrWrap, BigDecimal])
    val e = StrEntity(1, Map(StrWrap("1") -> BigDecimal(1)))
    val q = quote(querySchema[StrEntity]("MapsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Mapped encoding for CassandraMapper types" in {
    case class IntEntity(id: Int, intDouble: Map[IntWrap, Double])
    val e = IntEntity(1, Map(IntWrap(1) -> 1d))
    val q = quote(querySchema[IntEntity]("MapsEntity"))

    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Map in where clause / contains" in {
    val e = MapFrozen(Map(1 -> true))
    ctx.run(mapFroz.insert(lift(e)))
    ctx.run(mapFroz.filter(_.id == lift(Map(1 -> true)))) mustBe List(e)
    ctx.run(mapFroz.filter(_.id == lift(Map(1 -> false)))) mustBe Nil

    ctx.run(mapFroz.filter(_.id.contains(1)).allowFiltering) mustBe List(e)
    ctx.run(mapFroz.filter(_.id.contains(2)).allowFiltering) mustBe Nil
  }

  "Map.containsValue" in {
    val e = MapFrozen(Map(1 -> true))
    ctx.run(mapFroz.insert(lift(e)))

    ctx.run(mapFroz.filter(_.id.containsValue(true)).allowFiltering) mustBe List(e)
    ctx.run(mapFroz.filter(_.id.containsValue(false)).allowFiltering) mustBe Nil
  }

  override protected def beforeEach(): Unit = {
    ctx.run(q.delete)
    ctx.run(mapFroz.delete)
  }
} 
Example 173
Source File: ArrayEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.jasync

import java.sql.Timestamp
import java.time.LocalDate
import java.util.Date

import io.getquill.PostgresJAsyncContext
import io.getquill.context.sql.encoding.ArrayEncoding
import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

trait ArrayEncoders extends ArrayEncoding {
  self: PostgresJAsyncContext[_] =>

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col]
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayRawEncoder[BigDecimal, Col]
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col]
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col]
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col]
  implicit def arrayIntEncoder[Col <: Seq[Index]]: Encoder[Col] = arrayRawEncoder[Index, Col]
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col]
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col]
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col]
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayEncoder[Date, Col](d => Timestamp.from(d.toInstant))
  implicit def arrayJodaDateTimeEncoder[Col <: Seq[JodaDateTime]]: Encoder[Col] = arrayEncoder[JodaDateTime, Col](_.toLocalDateTime)
  implicit def arrayJodaLocalDateTimeEncoder[Col <: Seq[JodaLocalDateTime]]: Encoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col]
  implicit def arrayJodaLocalDateEncoder[Col <: Seq[JodaLocalDate]]: Encoder[Col] = arrayRawEncoder[JodaLocalDate, Col]
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](encodeLocalDate.f)

  def arrayEncoder[T, Col <: Seq[T]](mapper: T => Any): Encoder[Col] =
    encoder[Col]((col: Col) => col.toIndexedSeq.map(mapper).mkString("{", ",", "}"), SqlTypes.ARRAY)

  def arrayRawEncoder[T, Col <: Seq[T]]: Encoder[Col] = arrayEncoder[T, Col](identity)

} 
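arrayEncoder above renders a Seq as a PostgreSQL array literal by mapping each element and joining with braces. A tiny illustration of that formatting step in isolation (values are arbitrary):

import java.sql.Timestamp
import java.util.Date

val ints = Seq(1, 2, 3).mkString("{", ",", "}")                   // "{1,2,3}"
val ts = Seq(new Date(0L)).map(d => Timestamp.from(d.toInstant))  // the mapper arrayDateEncoder uses
val dates = ts.mkString("{", ",", "}")                            // timestamp rendering depends on the JVM default timezone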
Example 174
Source File: ArrayDecoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.jasync

import java.time.LocalDate
import java.util
import java.util.Date

import io.getquill.PostgresJAsyncContext
import io.getquill.context.sql.encoding.ArrayEncoding
import io.getquill.util.Messages.fail
import org.joda.time.{ DateTime => JodaDateTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }
import scala.reflect.ClassTag
import scala.collection.compat._
import scala.jdk.CollectionConverters._

trait ArrayDecoders extends ArrayEncoding {
  self: PostgresJAsyncContext[_] =>

  implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawEncoder[String, Col]
  implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayDecoder[java.math.BigDecimal, BigDecimal, Col](BigDecimal.javaBigDecimal2bigDecimal)
  implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawEncoder[Boolean, Col]
  implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayDecoder[Short, Byte, Col](_.toByte)
  implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawEncoder[Short, Col]
  implicit def arrayIntDecoder[Col <: Seq[Index]](implicit bf: CBF[Index, Col]): Decoder[Col] = arrayRawEncoder[Index, Col]
  implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawEncoder[Long, Col]
  implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayDecoder[Double, Float, Col](_.toFloat)
  implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawEncoder[Double, Col]
  implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, Date, Col](_.toDate)
  implicit def arrayJodaDateTimeDecoder[Col <: Seq[JodaDateTime]](implicit bf: CBF[JodaDateTime, Col]): Decoder[Col] = arrayDecoder[JodaLocalDateTime, JodaDateTime, Col](_.toDateTime)
  implicit def arrayJodaLocalDateTimeDecoder[Col <: Seq[JodaLocalDateTime]](implicit bf: CBF[JodaLocalDateTime, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDateTime, Col]
  implicit def arrayJodaLocalDateDecoder[Col <: Seq[JodaLocalDate]](implicit bf: CBF[JodaLocalDate, Col]): Decoder[Col] = arrayRawEncoder[JodaLocalDate, Col]
  implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[JodaLocalDate, LocalDate, Col](decodeLocalDate.f)

  def arrayDecoder[I, O, Col <: Seq[O]](mapper: I => O)(implicit bf: CBF[O, Col], iTag: ClassTag[I], oTag: ClassTag[O]): Decoder[Col] =
    AsyncDecoder[Col](SqlTypes.ARRAY)(new BaseDecoder[Col] {
      def apply(index: Index, row: ResultRow): Col = row.get(index) match {
        case seq: util.ArrayList[_] =>
          seq.asScala.foldLeft(bf.newBuilder) {
            case (b, x: I) => b += mapper(x)
            case (_, x)    => fail(s"Array at index $index contains element of ${x.getClass.getCanonicalName}, but expected $iTag")
          }.result()
        case value => fail(
          s"Value '$value' at index $index is not an array so it cannot be decoded to collection of $oTag"
        )
      }
    })

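  // Note: despite the "Encoder" name, this builds a Decoder — the raw case where the
  // database element type already matches the Scala element type, so the mapper is identity.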
  def arrayRawEncoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] =
    arrayDecoder[T, T, Col](identity)
} 
Example 175
Source File: MirrorDecoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.mirror

import java.time.LocalDate
import java.util.{ Date, UUID }

import scala.reflect.ClassTag
import io.getquill.context.Context

trait MirrorDecoders {
  this: Context[_, _] =>

  override type PrepareRow = Row
  override type ResultRow = Row
  override type Decoder[T] = MirrorDecoder[T]

  case class MirrorDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] {
    override def apply(index: Index, row: ResultRow) =
      decoder(index, row)
  }

  def decoder[T: ClassTag]: Decoder[T] = MirrorDecoder((index: Index, row: ResultRow) => row[T](index))

  def decoderUnsafe[T]: Decoder[T] = MirrorDecoder((index: Index, row: ResultRow) => row.data(index).asInstanceOf[T])

  implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], d: Decoder[I]): Decoder[O] =
    MirrorDecoder((index: Index, row: ResultRow) => mapped.f(d.apply(index, row)))

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
    MirrorDecoder((index: Index, row: ResultRow) =>
      row[Option[Any]](index) match {
        case Some(v) => Some(d(0, Row(v)))
        case None    => None
      })

  implicit val stringDecoder: Decoder[String] = decoder[String]
  implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder[BigDecimal]
  implicit val booleanDecoder: Decoder[Boolean] = decoder[Boolean]
  implicit val byteDecoder: Decoder[Byte] = decoder[Byte]
  implicit val shortDecoder: Decoder[Short] = decoder[Short]
  implicit val intDecoder: Decoder[Int] = decoder[Int]
  implicit val longDecoder: Decoder[Long] = decoder[Long]
  implicit val floatDecoder: Decoder[Float] = decoder[Float]
  implicit val doubleDecoder: Decoder[Double] = decoder[Double]
  implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder[Array[Byte]]
  implicit val dateDecoder: Decoder[Date] = decoder[Date]
  implicit val localDateDecoder: Decoder[LocalDate] = decoder[LocalDate]
  implicit val uuidDecoder: Decoder[UUID] = decoder[UUID]
} 
Example 176
Source File: MirrorEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.mirror

import java.time.LocalDate
import java.util.{ Date, UUID }

import io.getquill.context.Context

trait MirrorEncoders {
  this: Context[_, _] =>

  override type PrepareRow = Row
  override type ResultRow = Row
  override type Encoder[T] = MirrorEncoder[T]

  case class MirrorEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder(index, value, row)
  }

  def encoder[T]: Encoder[T] = MirrorEncoder((index: Index, value: T, row: PrepareRow) => row.add(value))

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    MirrorEncoder((index: Index, value: I, row: PrepareRow) => e(index, mapped.f(value), row))

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    MirrorEncoder((index: Index, value: Option[T], row: PrepareRow) => {
      value match {
        case None    => row.add(None)
        case Some(v) => row.add(d(index, v, Row()).data.headOption)
      }
    })

  implicit val stringEncoder: Encoder[String] = encoder[String]
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal]
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean]
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte]
  implicit val shortEncoder: Encoder[Short] = encoder[Short]
  implicit val intEncoder: Encoder[Int] = encoder[Int]
  implicit val longEncoder: Encoder[Long] = encoder[Long]
  implicit val floatEncoder: Encoder[Float] = encoder[Float]
  implicit val doubleEncoder: Encoder[Double] = encoder[Double]
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]]
  implicit val dateEncoder: Encoder[Date] = encoder[Date]
  implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate]
  implicit val uuidEncoder: Encoder[UUID] = encoder[UUID]
} 
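Because the mirror Row is just an indexed buffer of values, the Date encoder/decoder pair above amounts to an append and an indexed read. A small sketch using the Row type these traits work against (illustrative only):

import java.util.Date
import io.getquill.context.mirror.Row

val d = new Date
val row = Row().add(d)         // what dateEncoder does at index 0
val back: Date = row[Date](0)  // what dateDecoder reads back
assert(back == d)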
Example 177
Source File: FinagleMysqlEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.finagle.mysql

import java.sql.Timestamp
import java.time.{ LocalDate, LocalDateTime }
import java.util.{ Date, UUID }

import com.twitter.finagle.mysql.CanBeParameter._
import com.twitter.finagle.mysql.Parameter.wrap
import com.twitter.finagle.mysql._
import io.getquill.FinagleMysqlContext

trait FinagleMysqlEncoders {
  this: FinagleMysqlContext[_] =>

  type Encoder[T] = FinagleMySqlEncoder[T]

  case class FinagleMySqlEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder(index, value, row)
  }

  def encoder[T](f: T => Parameter): Encoder[T] =
    FinagleMySqlEncoder((index, value, row) => row :+ f(value))

  def encoder[T](implicit cbp: CanBeParameter[T]): Encoder[T] =
    encoder[T]((v: T) => v: Parameter)

  private[this] val nullEncoder = encoder((_: Null) => Parameter.NullParameter)

  implicit def optionEncoder[T](implicit e: Encoder[T]): Encoder[Option[T]] =
    FinagleMySqlEncoder { (index, value, row) =>
      value match {
        case None    => nullEncoder.encoder(index, null, row)
        case Some(v) => e.encoder(index, v, row)
      }
    }

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    FinagleMySqlEncoder(mappedBaseEncoder(mapped, e.encoder))

  implicit val stringEncoder: Encoder[String] = encoder[String]
  implicit val bigDecimalEncoder: Encoder[BigDecimal] =
    encoder[BigDecimal] { (value: BigDecimal) =>
      BigDecimalValue(value): Parameter
    }
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean]
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte]
  implicit val shortEncoder: Encoder[Short] = encoder[Short]
  implicit val intEncoder: Encoder[Int] = encoder[Int]
  implicit val longEncoder: Encoder[Long] = encoder[Long]
  implicit val floatEncoder: Encoder[Float] = encoder[Float]
  implicit val doubleEncoder: Encoder[Double] = encoder[Double]
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]]
  implicit val dateEncoder: Encoder[Date] = encoder[Date] {
    (value: Date) => timestampValue(new Timestamp(value.getTime)): Parameter
  }
  implicit val localDateEncoder: Encoder[LocalDate] = encoder[LocalDate] {
    (d: LocalDate) => DateValue(java.sql.Date.valueOf(d)): Parameter
  }
  implicit val localDateTimeEncoder: Encoder[LocalDateTime] = encoder[LocalDateTime] {
    (d: LocalDateTime) => timestampValue(new Timestamp(d.atZone(injectionTimeZone.toZoneId).toInstant.toEpochMilli)): Parameter
  }
  implicit val uuidEncoder: Encoder[UUID] = mappedEncoder(MappedEncoding(_.toString), stringEncoder)
} 
Example 178
Source File: SqlContext.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.sql

import java.time.LocalDate

import io.getquill.idiom.{ Idiom => BaseIdiom }
import java.util.{ Date, UUID }

import io.getquill.context.Context
import io.getquill.context.sql.dsl.SqlDsl
import io.getquill.NamingStrategy

trait SqlContext[Idiom <: BaseIdiom, Naming <: NamingStrategy]
  extends Context[Idiom, Naming]
  with SqlDsl {

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]]
  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]]

  implicit val stringDecoder: Decoder[String]
  implicit val bigDecimalDecoder: Decoder[BigDecimal]
  implicit val booleanDecoder: Decoder[Boolean]
  implicit val byteDecoder: Decoder[Byte]
  implicit val shortDecoder: Decoder[Short]
  implicit val intDecoder: Decoder[Int]
  implicit val longDecoder: Decoder[Long]
  implicit val floatDecoder: Decoder[Float]
  implicit val doubleDecoder: Decoder[Double]
  implicit val byteArrayDecoder: Decoder[Array[Byte]]
  implicit val dateDecoder: Decoder[Date]
  implicit val localDateDecoder: Decoder[LocalDate]
  implicit val uuidDecoder: Decoder[UUID]

  implicit val stringEncoder: Encoder[String]
  implicit val bigDecimalEncoder: Encoder[BigDecimal]
  implicit val booleanEncoder: Encoder[Boolean]
  implicit val byteEncoder: Encoder[Byte]
  implicit val shortEncoder: Encoder[Short]
  implicit val intEncoder: Encoder[Int]
  implicit val longEncoder: Encoder[Long]
  implicit val floatEncoder: Encoder[Float]
  implicit val doubleEncoder: Encoder[Double]
  implicit val byteArrayEncoder: Encoder[Array[Byte]]
  implicit val dateEncoder: Encoder[Date]
  implicit val localDateEncoder: Encoder[LocalDate]
  implicit val uuidEncoder: Encoder[UUID]
} 
Example 179
Source File: ArrayMirrorEncoding.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.sql.encoding.mirror

import java.time.LocalDate
import java.util.Date

import io.getquill.SqlMirrorContext
import io.getquill.context.sql.encoding.ArrayEncoding

trait ArrayMirrorEncoding extends ArrayEncoding {
  this: SqlMirrorContext[_, _] =>

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = encoder[Col]
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = encoder[Col]
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = encoder[Col]
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = encoder[Col]
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = encoder[Col]
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = encoder[Col]
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = encoder[Col]
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = encoder[Col]
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = encoder[Col]
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = encoder[Col]
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = encoder[Col]

  implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = decoderUnsafe[Col]
  implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = decoderUnsafe[Col]
} 
Example 180
Source File: ArrayEncoding.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.sql.encoding

import java.time.LocalDate
import java.util.Date

import io.getquill.context.sql.SqlContext

import scala.collection.compat._
import scala.language.higherKinds

trait ArrayEncoding {
  self: SqlContext[_, _] =>

  type CBF[T, Col] = Factory[T, Col]

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col]
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col]
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col]
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col]
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col]
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col]
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col]
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col]
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col]
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col]
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col]

  implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col]
  implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col]
  implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col]
  implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col]
  implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col]
  implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col]
  implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col]
  implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col]
  implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col]
  implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col]
  implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col]

  implicit def arrayMappedEncoder[I, O, Col[X] <: Seq[X]](
    implicit
    mapped: MappedEncoding[I, O],
    e:      Encoder[Seq[O]]
  ): Encoder[Col[I]] = {
    mappedEncoder[Col[I], Seq[O]](MappedEncoding((col: Col[I]) => col.map(mapped.f)), e)
  }

  implicit def arrayMappedDecoder[I, O, Col[X] <: Seq[X]](
    implicit
    mapped: MappedEncoding[I, O],
    d:      Decoder[Seq[I]],
    bf:     Factory[O, Col[O]]
  ): Decoder[Col[O]] = {
    mappedDecoder[Seq[I], Col[O]](MappedEncoding((col: Seq[I]) =>
      col.foldLeft(bf.newBuilder)((b, x) => b += mapped.f(x)).result), d)
  }
} 
Example 181
Source File: ArrayMirrorEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.sql.mirror

import java.time.LocalDate
import java.util.Date

import io.getquill.context.sql.encoding.ArrayEncodingBaseSpec
import io.getquill.context.sql.testContext

class ArrayMirrorEncodingSpec extends ArrayEncodingBaseSpec {
  val ctx = testContext

  import ctx._

  val q = quote(query[ArraysTestEntity])

  "Support all sql base types and `Seq` implementers" in {
    val insertStr = ctx.run(q.insert(lift(e))).string
    val selectStr = ctx.run(q).string

    insertStr mustEqual "INSERT INTO ArraysTestEntity (texts,decimals,bools,bytes,shorts,ints,longs,floats," +
      "doubles,timestamps,dates) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"

    selectStr mustEqual "SELECT x.texts, x.decimals, x.bools, x.bytes, x.shorts, x.ints, x.longs, x.floats, " +
      "x.doubles, x.timestamps, x.dates FROM ArraysTestEntity x"
  }

  "Support Seq encoding basing on MappedEncoding" in {
    val wrapQ = quote(querySchema[WrapEntity]("ArraysTestEntity"))

    val insertStr = ctx.run(wrapQ.insert(lift(wrapE))).string
    val selectStr = ctx.run(wrapQ).string
    insertStr mustEqual "INSERT INTO ArraysTestEntity (texts) VALUES (?)"
    selectStr mustEqual "SELECT x.texts FROM ArraysTestEntity x"
  }

  "Provide implicit encoders for raw types" in {
    implicitly[Encoder[List[String]]]
    implicitly[Encoder[List[BigDecimal]]]
    implicitly[Encoder[List[Boolean]]]
    implicitly[Encoder[List[Byte]]]
    implicitly[Encoder[List[Short]]]
    implicitly[Encoder[List[Index]]]
    implicitly[Encoder[List[Long]]]
    implicitly[Encoder[List[Float]]]
    implicitly[Encoder[List[Double]]]
    implicitly[Encoder[List[Date]]]
    implicitly[Encoder[List[LocalDate]]]
  }

  "Provide implicit decoders for raw types" in {
    implicitly[Decoder[List[String]]]
    implicitly[Decoder[List[BigDecimal]]]
    implicitly[Decoder[List[Boolean]]]
    implicitly[Decoder[List[Byte]]]
    implicitly[Decoder[List[Short]]]
    implicitly[Decoder[List[Index]]]
    implicitly[Decoder[List[Long]]]
    implicitly[Decoder[List[Float]]]
    implicitly[Decoder[List[Double]]]
    implicitly[Decoder[List[Date]]]
    implicitly[Decoder[List[LocalDate]]]
  }
} 
Example 182
Source File: ArrayEncodingBaseSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.sql.encoding

import java.time.LocalDate
import java.util.Date

import io.getquill.{ MappedEncoding, Spec }
import org.scalatest.{ Assertion, BeforeAndAfterEach }

trait ArrayEncodingBaseSpec extends Spec with BeforeAndAfterEach {

  // Support all sql base types and `Seq` implementers
  case class ArraysTestEntity(
    texts:      List[String],
    decimals:   Seq[BigDecimal],
    bools:      Vector[Boolean],
    bytes:      List[Byte],
    shorts:     IndexedSeq[Short],
    ints:       Seq[Int],
    longs:      Seq[Long],
    floats:     Seq[Float],
    doubles:    Seq[Double],
    timestamps: Seq[Date],
    dates:      Seq[LocalDate]
  )

  val e = ArraysTestEntity(List("test"), Seq(BigDecimal(2.33)), Vector(true, true), List(1),
    IndexedSeq(3), Seq(2), Seq(1, 2, 3), Seq(1f, 2f), Seq(4d, 3d),
    Seq(new Date(System.currentTimeMillis())), Seq(LocalDate.now()))

  // casting types can be dangerous so we need to ensure that everything is ok
  def baseEntityDeepCheck(e1: ArraysTestEntity, e2: ArraysTestEntity): Assertion = {
    e1.texts.head mustBe e2.texts.head
    e1.decimals.head mustBe e2.decimals.head
    e1.bools.head mustBe e2.bools.head
    e1.bytes.head mustBe e2.bytes.head
    e1.shorts.head mustBe e2.shorts.head
    e1.ints.head mustBe e2.ints.head
    e1.longs.head mustBe e2.longs.head
    e1.floats.head mustBe e2.floats.head
    e1.doubles.head mustBe e2.doubles.head
    e1.timestamps.head mustBe e2.timestamps.head
    e1.dates.head mustBe e2.dates.head
  }

  // Support Seq encoding basing on MappedEncoding
  case class StrWrap(str: String)
  implicit val strWrapEncode: MappedEncoding[StrWrap, String] = MappedEncoding(_.str)
  implicit val strWrapDecode: MappedEncoding[String, StrWrap] = MappedEncoding(StrWrap.apply)
  case class WrapEntity(texts: Seq[StrWrap])
  val wrapE = WrapEntity(List("hey", "ho").map(StrWrap.apply))
} 
Example 183
Source File: OrientDBContext.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.orientdb

import java.util.Date

import io.getquill.NamingStrategy
import io.getquill.context.Context
import io.getquill.context.orientdb.dsl.OrientDBDsl

trait OrientDBContext[Naming <: NamingStrategy]
  extends Context[OrientDBIdiom, Naming]
  with OrientDBDsl {

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]]
  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]]

  implicit val stringDecoder: Decoder[String]
  implicit val doubleDecoder: Decoder[Double]
  implicit val bigDecimalDecoder: Decoder[BigDecimal]
  implicit val booleanDecoder: Decoder[Boolean]
  implicit val shortDecoder: Decoder[Short]
  implicit val intDecoder: Decoder[Int]
  implicit val longDecoder: Decoder[Long]
  implicit val floatDecoder: Decoder[Float]
  implicit val byteArrayDecoder: Decoder[Array[Byte]]
  implicit val dateDecoder: Decoder[Date]

  implicit val stringEncoder: Encoder[String]
  implicit val bigDecimalEncoder: Encoder[BigDecimal]
  implicit val booleanEncoder: Encoder[Boolean]
  implicit val shortEncoder: Encoder[Short]
  implicit val intEncoder: Encoder[Int]
  implicit val longEncoder: Encoder[Long]
  implicit val floatEncoder: Encoder[Float]
  implicit val doubleEncoder: Encoder[Double]
  implicit val dateEncoder: Encoder[Date]
  implicit val byteArrayEncoder: Encoder[Array[Byte]]

  implicit def listDecoder[T]: Decoder[List[T]]
  implicit def setDecoder[T]: Decoder[Set[T]]
  implicit def mapDecoder[K, V]: Decoder[Map[K, V]]

  implicit def listEncoder[T]: Encoder[List[T]]
  implicit def setEncoder[T]: Encoder[Set[T]]
  implicit def mapEncoder[K, V]: Encoder[Map[K, V]]
} 
Example 184
Source File: Decoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.orientdb.encoding

import java.util.Date

import io.getquill.context.orientdb.OrientDBSessionContext
import io.getquill.util.Messages.fail

trait Decoders extends CollectionDecoders {
  this: OrientDBSessionContext[_] =>

  type Decoder[T] = OrientDBDecoder[T]

  case class OrientDBDecoder[T](decoder: BaseDecoder[T]) extends BaseDecoder[T] {
    override def apply(index: Index, row: ResultRow) =
      decoder(index, row)
  }

  def decoder[T](d: BaseDecoder[T]): Decoder[T] = OrientDBDecoder(
    (index, row) =>
      if (index >= row.fieldNames().length || row.fieldValues()(index) == null) {
        fail(s"Expected column at index $index to be defined but it was empty")
      } else
        d(index, row)
  )

  def decoder[T](f: ResultRow => Index => T): Decoder[T] =
    decoder((index, row) => f(row)(index))

  implicit def optionDecoder[T](implicit d: Decoder[T]): Decoder[Option[T]] =
    OrientDBDecoder((index, row) => {
      if (index < row.fieldValues().length) {
        row.fieldValues()(index) == null match {
          case true  => None
          case false => Some(d(index, row))
        }
      } else None
    })

  implicit def mappedDecoder[I, O](implicit mapped: MappedEncoding[I, O], decoder: Decoder[I]): Decoder[O] =
    OrientDBDecoder(mappedBaseDecoder(mapped, decoder.decoder))

  implicit val stringDecoder: Decoder[String] = decoder((index, row) => {
    row.field[String](row.fieldNames()(index))
  })
  implicit val doubleDecoder: Decoder[Double] = decoder((index, row) => row.field[Double](row.fieldNames()(index)))
  implicit val bigDecimalDecoder: Decoder[BigDecimal] = decoder((index, row) => row.field[java.math.BigDecimal](row.fieldNames()(index)))
  implicit val booleanDecoder: Decoder[Boolean] = decoder((index, row) => row.field[Boolean](row.fieldNames()(index)))
  implicit val intDecoder: Decoder[Int] = decoder((index, row) => row.field[Int](row.fieldNames()(index)))
  implicit val shortDecoder: Decoder[Short] = decoder((index, row) => row.field[Short](row.fieldNames()(index)))
  implicit val byteDecoder: Decoder[Byte] = decoder((index, row) => row.field[Byte](row.fieldNames()(index)))
  implicit val longDecoder: Decoder[Long] = decoder((index, row) => {
    if (row.fieldValues()(index).isInstanceOf[Int]) {
      row.field[Int](row.fieldNames()(index)).toLong
    } else
      row.field[Long](row.fieldNames()(index))
  })
  implicit val floatDecoder: Decoder[Float] = decoder((index, row) => row.field[Float](row.fieldNames()(index)))
  implicit val byteArrayDecoder: Decoder[Array[Byte]] = decoder((index, row) => {
    row.field[Array[Byte]](row.fieldNames()(index))
  })
  implicit val dateDecoder: Decoder[Date] = decoder((index, row) => row.field[Date](row.fieldNames()(index)))
} 
Example 185
Source File: Encoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.orientdb.encoding

import java.util.Date

import io.getquill.context.orientdb.OrientDBSessionContext

trait Encoders extends CollectionEncoders {
  this: OrientDBSessionContext[_] =>

  type Encoder[T] = OrientDBEncoder[T]

  case class OrientDBEncoder[T](encoder: BaseEncoder[T]) extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder(index, value, row)
  }

  def encoder[T](e: BaseEncoder[T]): Encoder[T] = OrientDBEncoder(e)

  def encoder[T](f: PrepareRow => (Index, T) => PrepareRow): Encoder[T] =
    encoder((index, value, row) => f(row)(index, value))

  private[this] val nullEncoder: Encoder[Null] =
    encoder((index, value, row) => { row.insert(index, null); row })

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    encoder { (index, value, row) =>
      value match {
        case None    => nullEncoder(index, null, row)
        case Some(v) => d(index, v, row)
      }
    }

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], encoder: Encoder[O]): Encoder[I] =
    OrientDBEncoder(mappedBaseEncoder(mapped, encoder.encoder))

  implicit val stringEncoder: Encoder[String] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder((index, value, row) => { row.insert(index, value.bigDecimal); row })
  implicit val booleanEncoder: Encoder[Boolean] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val intEncoder: Encoder[Int] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val shortEncoder: Encoder[Short] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val byteEncoder: Encoder[Byte] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val longEncoder: Encoder[Long] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val floatEncoder: Encoder[Float] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val doubleEncoder: Encoder[Double] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val dateEncoder: Encoder[Date] = encoder((index, value, row) => { row.insert(index, value); row })
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder((index, value, row) => { row.insert(index, value); row })
} 
Example 186
Source File: SetsEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.orientdb

import java.util.Date

import io.getquill.Spec

class SetsEncodingSpec extends Spec {

  case class SetsEntity(
    id:         Int,
    texts:      Set[String],
    bools:      Set[Boolean],
    ints:       Set[Int],
    longs:      Set[Long],
    doubles:    Set[Double],
    timestamps: Set[Date]
  )

  val e = SetsEntity(1, Set("c"), Set(true), Set(1), Set(2),
    Set(5.5d), Set(new Date()))

  private def verify(expected: SetsEntity, actual: SetsEntity): Boolean = {
    expected.id mustEqual actual.id
    expected.texts mustEqual actual.texts
    expected.bools mustEqual actual.bools
    expected.ints mustEqual actual.ints
    expected.longs mustEqual actual.longs
    expected.doubles mustEqual actual.doubles
    actual.timestamps.head.isInstanceOf[Date]
    true
  }

  "mirror" in {
    val ctx = orientdb.mirrorContext
    import ctx._
    val q = quote(query[SetsEntity])
    ctx.run(q.insert(lift(e)))
    ctx.run(q)
  }

  "Set encoders/decoders" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    val q = quote(query[SetsEntity])
    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    verify(e, ctx.run(q.filter(_.id == 1)).head)
  }

  "Empty Lists and optional fields" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class Entity(id: Int, texts: Option[List[String]], bools: Option[List[Boolean]])
    val e = Entity(1, Some(List("1", "2")), None)
    val q = quote(querySchema[Entity]("ListEntity"))

    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Blob (Array[Byte]) support" ignore {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class BlobsEntity(id: Int, blobs: List[Array[Byte]])
    val e = BlobsEntity(1, List(Array(1.toByte, 2.toByte), Array(2.toByte)))
    val q = quote(querySchema[BlobsEntity]("BlobsEntity"))

    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1))
      .head.blobs.map(_.toList) mustBe e.blobs.map(_.toList)
  }

  "Set in where clause" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class ListFrozen(id: List[Int])
    val e = ListFrozen(List(1, 2))
    val q = quote(query[ListFrozen])
    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(p => liftQuery(Set(1)).contains(p.id))) mustBe List(e)
    ctx.run(q.filter(_.id == lift(List(1)))) mustBe Nil
  }
} 
Example 187
Source File: ListsEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.orientdb

import java.util.Date

import io.getquill.Spec

class ListsEncodingSpec extends Spec {

  case class ListsEntity(
    id:         Int,
    texts:      List[String],
    bools:      List[Boolean],
    ints:       List[Int],
    longs:      List[Long],
    floats:     List[Float],
    doubles:    List[Double],
    timestamps: List[Date]
  )
  val e = ListsEntity(1, List("c"), List(true), List(1, 2), List(2, 3), List(1.2f, 3.2f),
    List(5.1d), List(new Date()))

  private def verify(expected: ListsEntity, actual: ListsEntity): Boolean = {
    expected.id mustEqual actual.id
    expected.texts mustEqual actual.texts
    expected.bools mustEqual actual.bools
    expected.ints mustEqual actual.ints
    expected.longs mustEqual actual.longs
    expected.doubles mustEqual actual.doubles
    actual.timestamps.head.isInstanceOf[Date]
    true
  }

  "mirror" in {
    val ctx = orientdb.mirrorContext
    import ctx._
    val q = quote(query[ListsEntity])
    ctx.run(q.insert(lift(e)))
    ctx.run(q)
  }

  "List encoders/decoders for OrientDB Types" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    val q = quote(query[ListsEntity])
    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    verify(e, ctx.run(q.filter(_.id == 1)).head)
  }

  "Empty Lists and optional fields" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class Entity(id: Int, texts: Option[List[String]], bools: Option[List[Boolean]])
    val e = Entity(1, Some(List("1", "2")), None)
    val q = quote(querySchema[Entity]("ListEntity"))

    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))

    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Blob (Array[Byte]) support" ignore {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class BlobsEntity(id: Int, blobs: List[Array[Byte]])
    val e = BlobsEntity(1, List(Array(1.toByte, 2.toByte), Array(2.toByte)))
    val q = quote(querySchema[BlobsEntity]("BlobsEntity"))

    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1))
      .head.blobs.map(_.toList) mustBe e.blobs.map(_.toList)
  }

  "List in where clause" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class ListFrozen(id: List[Int])
    val e = ListFrozen(List(1, 2))
    val q = quote(query[ListFrozen])
    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(p => liftQuery(Set(1)).contains(p.id))) mustBe List(e)
    ctx.run(q.filter(_.id == lift(List(1)))) mustBe Nil
  }
} 
Example 188
Source File: MapsEncodingSpec.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.orientdb

import java.util.Date

import io.getquill.Spec

class MapsEncodingSpec extends Spec {

  case class MapsEntity(
    id:         Int,
    longDouble: Map[Long, Double],
    intDouble:  Map[Int, Double],
    boolDate:   Map[Boolean, Date]
  )
  val e = MapsEntity(1, Map(1L -> 1.1), Map(1 -> 1.1d), Map(true -> new Date()))

  private def verify(expected: MapsEntity, actual: MapsEntity): Boolean = {
    expected.id mustEqual actual.id
    expected.longDouble.head._2 mustEqual actual.longDouble.head._2
    expected.intDouble.head._2 mustEqual actual.intDouble.head._2
    actual.boolDate.head._2.isInstanceOf[Date]
    true
  }

  "mirror" in {
    val ctx = orientdb.mirrorContext
    import ctx._
    val q = quote(query[MapsEntity])
    ctx.run(q.insert(lift(e)))
    ctx.run(q)
  }

  "Map encoders/decoders" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    val q = quote(query[MapsEntity])
    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    verify(e, ctx.run(q.filter(_.id == 1)).head)
  }

  "Empty maps and optional fields" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class Entity(
      id:         Int,
      intDouble:  Option[Map[Int, Double]],
      longDouble: Option[Map[Long, Double]]
    )
    val e = Entity(1, None, None)
    val q = quote(querySchema[Entity]("MapEntity"))

    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(_.id == 1)).head mustBe e
  }

  "Map in where clause" in {
    val ctx = orientdb.testSyncDB
    import ctx._
    case class MapFrozen(id: Map[Int, Boolean])
    val e = MapFrozen(Map(1 -> true))
    val q = quote(query[MapFrozen])
    ctx.run(q.delete)
    ctx.run(q.insert(lift(e)))
    ctx.run(q.filter(p => liftQuery(Set(1))
      .contains(p.id))).head.id.head._2 mustBe e.id.head._2
  }
} 
Example 189
Source File: Encoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.async

import java.time.{ LocalDate, LocalDateTime, LocalTime, OffsetDateTime, ZoneId, ZonedDateTime }
import java.util.Date

import org.joda.time.{ DateTime => JodaDateTime, DateTimeZone => JodaDateTimeZone, LocalTime => JodaLocalTime, LocalDate => JodaLocalDate, LocalDateTime => JodaLocalDateTime }

trait Encoders {
  this: AsyncContext[_, _, _] =>

  type Encoder[T] = AsyncEncoder[T]

  type EncoderSqlType = SqlTypes.SqlTypes

  case class AsyncEncoder[T](sqlType: DecoderSqlType)(implicit encoder: BaseEncoder[T])
    extends BaseEncoder[T] {
    override def apply(index: Index, value: T, row: PrepareRow) =
      encoder.apply(index, value, row)
  }

  def encoder[T](sqlType: DecoderSqlType): Encoder[T] =
    encoder(identity[T], sqlType)

  def encoder[T](f: T => Any, sqlType: DecoderSqlType): Encoder[T] =
    AsyncEncoder[T](sqlType)(new BaseEncoder[T] {
      def apply(index: Index, value: T, row: PrepareRow) =
        row :+ f(value)
    })

  implicit def mappedEncoder[I, O](implicit mapped: MappedEncoding[I, O], e: Encoder[O]): Encoder[I] =
    AsyncEncoder(e.sqlType)(new BaseEncoder[I] {
      def apply(index: Index, value: I, row: PrepareRow) =
        e(index, mapped.f(value), row)
    })

  implicit def optionEncoder[T](implicit d: Encoder[T]): Encoder[Option[T]] =
    AsyncEncoder(d.sqlType)(new BaseEncoder[Option[T]] {
      def apply(index: Index, value: Option[T], row: PrepareRow) = {
        value match {
          case None    => nullEncoder(index, null, row)
          case Some(v) => d(index, v, row)
        }
      }
    })

  private[this] val nullEncoder: Encoder[Null] = encoder[Null](SqlTypes.NULL)

  implicit val stringEncoder: Encoder[String] = encoder[String](SqlTypes.VARCHAR)
  implicit val bigDecimalEncoder: Encoder[BigDecimal] = encoder[BigDecimal](SqlTypes.REAL)
  implicit val booleanEncoder: Encoder[Boolean] = encoder[Boolean](SqlTypes.BOOLEAN)
  implicit val byteEncoder: Encoder[Byte] = encoder[Byte](SqlTypes.TINYINT)
  implicit val shortEncoder: Encoder[Short] = encoder[Short](SqlTypes.SMALLINT)
  implicit val intEncoder: Encoder[Int] = encoder[Int](SqlTypes.INTEGER)
  implicit val longEncoder: Encoder[Long] = encoder[Long](SqlTypes.BIGINT)
  implicit val floatEncoder: Encoder[Float] = encoder[Float](SqlTypes.FLOAT)
  implicit val doubleEncoder: Encoder[Double] = encoder[Double](SqlTypes.DOUBLE)
  implicit val byteArrayEncoder: Encoder[Array[Byte]] = encoder[Array[Byte]](SqlTypes.VARBINARY)
  implicit val jodaDateTimeEncoder: Encoder[JodaDateTime] = encoder[JodaDateTime](SqlTypes.TIMESTAMP)
  implicit val jodaLocalDateEncoder: Encoder[JodaLocalDate] = encoder[JodaLocalDate](SqlTypes.DATE)
  implicit val jodaLocalDateTimeEncoder: Encoder[JodaLocalDateTime] = encoder[JodaLocalDateTime](SqlTypes.TIMESTAMP)
  implicit val dateEncoder: Encoder[Date] = encoder[Date]((d: Date) => new JodaLocalDateTime(d), SqlTypes.TIMESTAMP)

  implicit val encodeZonedDateTime: MappedEncoding[ZonedDateTime, JodaDateTime] =
    MappedEncoding(zdt => new JodaDateTime(zdt.toInstant.toEpochMilli, JodaDateTimeZone.forID(zdt.getZone.getId)))

  implicit val encodeOffsetDateTime: MappedEncoding[OffsetDateTime, JodaDateTime] =
    MappedEncoding(odt => new JodaDateTime(odt.toInstant.toEpochMilli, JodaDateTimeZone.forID(odt.getOffset.getId)))

  implicit val encodeLocalDate: MappedEncoding[LocalDate, JodaLocalDate] =
    MappedEncoding(ld => new JodaLocalDate(ld.getYear, ld.getMonthValue, ld.getDayOfMonth))

  implicit val encodeLocalTime: MappedEncoding[LocalTime, JodaLocalTime] =
    MappedEncoding(lt => new JodaLocalTime(lt.getHour, lt.getMinute, lt.getSecond))

  implicit val encodeLocalDateTime: MappedEncoding[LocalDateTime, JodaLocalDateTime] =
    MappedEncoding(ldt => new JodaLocalDateTime(ldt.atZone(ZoneId.systemDefault()).toInstant.toEpochMilli))

  implicit val localDateEncoder: Encoder[LocalDate] = mappedEncoder(encodeLocalDate, jodaLocalDateEncoder)
} 
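
The async encoders above resolve user types through mappedEncoder. As a minimal sketch (UserId is a hypothetical wrapper type, not part of quill), providing a MappedEncoding into Long is enough for the context to derive an Encoder[UserId], the same way encodeLocalDate feeds localDateEncoder:

import io.getquill.MappedEncoding

// Hypothetical wrapper type used only for illustration.
case class UserId(value: Long)

object UserIdEncoding {
  // With this in implicit scope, mappedEncoder plus longEncoder above
  // yield an Encoder[UserId] for the async context.
  implicit val encodeUserId: MappedEncoding[UserId, Long] = MappedEncoding(_.value)
}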
Example 190
Source File: ArrayEncoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.jdbc

import java.sql.{ Timestamp, Date => SqlDate }
import java.sql.Types._
import java.time.LocalDate
import java.util.Date

import io.getquill.context.sql.encoding.ArrayEncoding
import scala.collection.compat._

trait ArrayEncoders extends ArrayEncoding {
  self: JdbcContextBase[_, _] =>

  implicit def arrayStringEncoder[Col <: Seq[String]]: Encoder[Col] = arrayRawEncoder[String, Col](VARCHAR)
  implicit def arrayBigDecimalEncoder[Col <: Seq[BigDecimal]]: Encoder[Col] = arrayEncoder[BigDecimal, Col](parseJdbcType(NUMERIC), _.bigDecimal)
  implicit def arrayBooleanEncoder[Col <: Seq[Boolean]]: Encoder[Col] = arrayRawEncoder[Boolean, Col](BOOLEAN)
  implicit def arrayByteEncoder[Col <: Seq[Byte]]: Encoder[Col] = arrayRawEncoder[Byte, Col](TINYINT)
  implicit def arrayShortEncoder[Col <: Seq[Short]]: Encoder[Col] = arrayRawEncoder[Short, Col](SMALLINT)
  implicit def arrayIntEncoder[Col <: Seq[Int]]: Encoder[Col] = arrayRawEncoder[Int, Col](INTEGER)
  implicit def arrayLongEncoder[Col <: Seq[Long]]: Encoder[Col] = arrayRawEncoder[Long, Col](BIGINT)
  implicit def arrayFloatEncoder[Col <: Seq[Float]]: Encoder[Col] = arrayRawEncoder[Float, Col](FLOAT)
  implicit def arrayDoubleEncoder[Col <: Seq[Double]]: Encoder[Col] = arrayRawEncoder[Double, Col](DOUBLE)
  implicit def arrayDateEncoder[Col <: Seq[Date]]: Encoder[Col] = arrayRawEncoder[Date, Col](TIMESTAMP)
  implicit def arrayTimestampEncoder[Col <: Seq[Timestamp]]: Encoder[Col] = arrayRawEncoder[Timestamp, Col](TIMESTAMP)
  implicit def arrayLocalDateEncoder[Col <: Seq[LocalDate]]: Encoder[Col] = arrayEncoder[LocalDate, Col](parseJdbcType(DATE), SqlDate.valueOf)

  
  def arrayRawEncoder[T, Col <: Seq[T]](jdbcType: Int): Encoder[Col] =
    arrayRawEncoder[T, Col](parseJdbcType(jdbcType))
} 
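
A rough usage sketch of these array encoders, assuming quill's PostgresJdbcContext (which mixes in ArrayEncoders) and a made-up DateArrays table; any JDBC context with array support would resolve the Seq[Date] encoder the same way:

import java.util.Date

import io.getquill.{ PostgresJdbcContext, SnakeCase }

object ArrayEncodersSketch {
  // Hypothetical entity; the Seq[Date] column is handled by arrayDateEncoder.
  case class DateArrays(id: Int, dates: Seq[Date])

  def insertDates(ctx: PostgresJdbcContext[SnakeCase]): Unit = {
    import ctx._
    val q = quote(query[DateArrays])
    ctx.run(q.insert(lift(DateArrays(1, Seq(new Date())))))
  }
}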
Example 191
Source File: ArrayDecoders.scala    From quill   with Apache License 2.0 5 votes vote down vote up
package io.getquill.context.jdbc

import java.sql.Timestamp
import java.time.LocalDate
import java.util.Date
import java.sql.{ Date => SqlDate }
import java.math.{ BigDecimal => JBigDecimal }

import io.getquill.context.sql.encoding.ArrayEncoding
import io.getquill.util.Messages.fail

import scala.collection.compat._
import scala.reflect.ClassTag

trait ArrayDecoders extends ArrayEncoding {
  self: JdbcContextBase[_, _] =>

  implicit def arrayStringDecoder[Col <: Seq[String]](implicit bf: CBF[String, Col]): Decoder[Col] = arrayRawDecoder[String, Col]
  implicit def arrayBigDecimalDecoder[Col <: Seq[BigDecimal]](implicit bf: CBF[BigDecimal, Col]): Decoder[Col] = arrayDecoder[JBigDecimal, BigDecimal, Col](BigDecimal.apply)
  implicit def arrayBooleanDecoder[Col <: Seq[Boolean]](implicit bf: CBF[Boolean, Col]): Decoder[Col] = arrayRawDecoder[Boolean, Col]
  implicit def arrayByteDecoder[Col <: Seq[Byte]](implicit bf: CBF[Byte, Col]): Decoder[Col] = arrayRawDecoder[Byte, Col]
  implicit def arrayShortDecoder[Col <: Seq[Short]](implicit bf: CBF[Short, Col]): Decoder[Col] = arrayRawDecoder[Short, Col]
  implicit def arrayIntDecoder[Col <: Seq[Int]](implicit bf: CBF[Int, Col]): Decoder[Col] = arrayRawDecoder[Int, Col]
  implicit def arrayLongDecoder[Col <: Seq[Long]](implicit bf: CBF[Long, Col]): Decoder[Col] = arrayRawDecoder[Long, Col]
  implicit def arrayFloatDecoder[Col <: Seq[Float]](implicit bf: CBF[Float, Col]): Decoder[Col] = arrayRawDecoder[Float, Col]
  implicit def arrayDoubleDecoder[Col <: Seq[Double]](implicit bf: CBF[Double, Col]): Decoder[Col] = arrayRawDecoder[Double, Col]
  implicit def arrayDateDecoder[Col <: Seq[Date]](implicit bf: CBF[Date, Col]): Decoder[Col] = arrayRawDecoder[Date, Col]
  implicit def arrayTimestampDecoder[Col <: Seq[Timestamp]](implicit bf: CBF[Timestamp, Col]): Decoder[Col] = arrayRawDecoder[Timestamp, Col]
  implicit def arrayLocalDateDecoder[Col <: Seq[LocalDate]](implicit bf: CBF[LocalDate, Col]): Decoder[Col] = arrayDecoder[SqlDate, LocalDate, Col](_.toLocalDate)

  
  def arrayRawDecoder[T: ClassTag, Col <: Seq[T]](implicit bf: CBF[T, Col]): Decoder[Col] =
    arrayDecoder[T, T, Col](identity)
} 
Example 192
Source File: DeepsenseUniversalSettingsPlugin.scala    From seahorse   with Apache License 2.0 5 votes vote down vote up
import java.util.Date

import com.typesafe.sbt.GitPlugin
import com.typesafe.sbt.SbtGit.git
import com.typesafe.sbt.packager.SettingsHelper
import com.typesafe.sbt.packager.universal.UniversalPlugin
import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal
import sbt.Keys._
import sbt._

object DeepsenseUniversalSettingsPlugin extends AutoPlugin {

  val gitVersion = taskKey[String]("Git version")

  val gitVersionFile = taskKey[File]("Git version file")

  override def requires = CommonSettingsPlugin && UniversalPlugin && GitPlugin

  override def projectSettings = Seq(
    gitVersion := {
      git.gitHeadCommit.value.getOrElse((version in Universal).value)
    },
    gitVersionFile := {
      val location = target.value / "build-info.txt"
      location.getParentFile.mkdirs()
      IO.write(location, "BUILD DATE: " + new Date().toString + "\n")
      IO.write(location, "GIT SHA: " + gitVersion.value + "\n", append = true)
      IO.write(location, "API VERSION: " + version.value + "\n", append = true)
      location
    },
    mappings in Universal += gitVersionFile.value -> "build-info.txt"
  ) ++ Seq(
    publish := (publish dependsOn (packageBin in Universal)).value
  ) ++ SettingsHelper.makeDeploymentSettings(Universal, packageBin in Universal, "zip")
} 
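
Since the plugin declares no trigger, a project has to enable it explicitly in the build definition. A minimal sketch (the project name and directory are assumptions):

// build.sbt
lazy val workflowManager = (project in file("workflowmanager"))
  .enablePlugins(DeepsenseUniversalSettingsPlugin)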
Example 193
Source File: GeneratorTest.scala    From kafka-connect-kcql-smt   with Apache License 2.0 5 votes vote down vote up
package com.landoop.connect.sql

import java.text.SimpleDateFormat
import java.util.Date

import com.landoop.json.sql.JacksonJson
import com.sksamuel.avro4s.SchemaFor
import org.scalatest.{Matchers, WordSpec}

import scala.util.Random


class GeneratorTest extends WordSpec with Matchers {
  "Generator" should {
    "generate schema" in {
      val sql = Sql.parse("SELECT * FROM `order-topic`")
      val schema = SchemaFor[Product]()
      val str = schema.toString
      println(str)
    }

    "generate data" in {
      val rnd = new Random(System.currentTimeMillis())
      // 24-hour clock with millisecond precision.
      val f = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
      val products = (1 to 4).map { i =>
        Product(i, f.format(new Date()), s"product_$i", Payment(rnd.nextDouble(), i * rnd.nextInt(3), "GBP"))
      }.map(JacksonJson.toJson).mkString(s"${System.lineSeparator()}")
      println(products)
    }
  }

}


case class Product(id: Int,
                   created: String,
                   name: String,
                   payment: Payment)

case class Payment(price: Double,
                   quantity: Int,
                   currency: String) 
Example 194
Source File: DQCommandLineOptions.scala    From DataQuality   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package it.agilelab.bigdata.DataQuality.utils

import java.util.{Calendar, Date}

import scopt.OptionParser

case class DQCommandLineOptions(applicationConf: String,
                                configFilePath: String,
                                refDate: Date = new Date(),
                                repartition: Boolean = false,
                                local: Boolean = false)

object DQCommandLineOptions {

  def parser(): OptionParser[DQCommandLineOptions] =
    new OptionParser[DQCommandLineOptions]("dataquality") {

      opt[String]('a', "application-conf") required () action { (x, c) =>
        c.copy(applicationConf = x)
      } text "Path to application configuration file"

      opt[String]('c', "configFilePath") required () action { (x, c) =>
        c.copy(configFilePath = x)
      } text "Path to run configuration file"

      opt[Calendar]('d', "reference-date") required () action { (x, c) =>
        c.copy(refDate = x.getTime)
      } text "Indicates the date at which the DataQuality checks will be performed (format YYYY-MM-DD)"

      opt[Unit]('r', "repartition") optional () action { (_, c) =>
        c.copy(repartition = true)
      } text "Specifies whether the application is repartitioning the input data"

      opt[Unit]('l', "local") optional () action { (_, c) =>
        c.copy(local = true)
      } text "Specifies whether the application is operating in local mode"
    }
} 
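
A short sketch of how this parser is typically driven from a main method; the argument values shown in the comment are illustrative:

package it.agilelab.bigdata.DataQuality.utils

// Example invocation: -a application.conf -c run.conf -d 2024-01-31 -l
object DQCommandLineOptionsExample {
  def main(args: Array[String]): Unit =
    DQCommandLineOptions.parser().parse(args, DQCommandLineOptions("", "")) match {
      case Some(opts) =>
        println(s"Running checks for ${opts.refDate} (local = ${opts.local})")
      case None =>
        sys.exit(1) // scopt has already printed the usage text
    }
}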
Example 195
Source File: MetricResultDAOApi.scala    From DataQuality   with GNU Lesser General Public License v3.0 5 votes vote down vote up
package dbmodel.results

import java.util.Date

import dbmodel.sources.MetricItem
import org.squeryl.Query
import org.squeryl.dsl.ast.LogicalBoolean

trait MetricResultDAOApi {

  def selectSortedColumnar(sortBy: Option[String],
                           paramOrderBy: Option[String],
                           whereClause: (MetricItem, MetricResultsColumnarItemDB) => LogicalBoolean)
                          (implicit requestId: String): Query[(MetricItem, MetricResultsColumnarItemDB)]

  def selectSortedFile(sortBy: Option[String],
                       paramOrderBy: Option[String],
                       whereClause: (MetricItem, MetricResultsFileItemDB) => LogicalBoolean)
                      (implicit requestId: String): Query[(MetricItem, MetricResultsFileItemDB)]

  def getById(metricId: String,
              page: Option[Int],
              limit: Option[Int],
              sortBy: Option[String],
              orderBy: Option[String])(implicit requestId: String): (Long, List[MetricResultsItemDB])

  def getBySource(sourceId: String,
                  page: Option[Int],
                  limit: Option[Int],
                  sortBy: Option[String],
                  orderBy: Option[String])(implicit requestId: String): (Long, List[MetricResultsItemDB])

  def getBySourceAndDate(sourceId: String,
                         startDateD: Date,
                         endDateD: Date,
                         page: Option[Int],
                         limit: Option[Int],
                         sortBy: Option[String],
                         orderBy: Option[String])(implicit requestId: String): (Long, List[MetricResultsItemDB])

  def getByDate(startDateD: Date,
                endDateD: Date,
                page: Option[Int],
                limit: Option[Int],
                sortBy: Option[String],
                orderBy: Option[String])(implicit requestId: String): (Long, List[MetricResultsItemDB])
} 
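
A caller-side sketch against this trait; dao stands for whatever implementation is bound at runtime, and the request id, date range and sort column are assumptions:

package dbmodel.results

import java.text.SimpleDateFormat

object MetricResultQueriesSketch {
  def januaryResults(dao: MetricResultDAOApi): (Long, List[MetricResultsItemDB]) = {
    implicit val requestId: String = "req-42" // assumed request id
    val fmt = new SimpleDateFormat("yyyy-MM-dd")
    dao.getByDate(
      startDateD = fmt.parse("2024-01-01"),
      endDateD = fmt.parse("2024-01-31"),
      page = Some(0),
      limit = Some(50),
      sortBy = Some("metricId"),
      orderBy = Some("ASC"))
  }
}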
Example 196
Source File: ExamplesTest.scala    From json-schema-codegen   with Apache License 2.0 5 votes vote down vote up
import java.net.{Inet6Address, InetAddress, Inet4Address, URI}
import java.util.Date

import argonaut.Argonaut._
import argonaut._
import org.scalatest.{FlatSpec, Matchers}
import product.vox.shop._


class ExamplesTest extends FlatSpec with Matchers {


  "AdditionalPropertiesJson" should "encode and decode" in {
    import additional.Codecs._
    test(additional.Properties("bvalue", Some(Map("p1" -> additional.PropertiesAdditional(1)))))
  }

  "AdditionalPropertiesOnlyJson" should "encode and decode" in {
    import additional.properties.Codecs._
    test(additional.properties.Only(Some(Map("p1" -> additional.properties.OnlyAdditional(1)))))
  }

  "EnumStrings" should "encode and decode" in {
    import Codecs._
    test(Strings.a)
    test(Strings.b)
  }

  "EnumIntegers" should "encode and decode" in {
    import Codecs._
    test(Integers.v10)
    test(Integers.v20)
  }

  "Formats" should "encode and decode" in {
    import Codecs._
    test(Formats(
      new URI("http://uri/address"),
      InetAddress.getByName("127.0.0.1").asInstanceOf[Inet4Address],
      InetAddress.getByName("FE80:0000:0000:0000:0202:B3FF:FE1E:8329").asInstanceOf[Inet6Address],
      new Date()
    ))
  }

  "Product" should "decode from string and encode to string" in {
    import product.vox.shop.Codecs._
    val js = """{"name":"Recharge Cards (5 PIN)","prices":[{"cost":0.0187,"currency":"USD","moq":200000}],"eid":"iso-card-5-pin","description":"<p>ISO card, 5 PINs, printed 4 colour front and back</p>\n<p>Every card option shown below meets Tier 1 operator quality standards, at a competitive pricing including freight to your country that’s always openly visible, with streamlined fulfillment and support included, creating what we believe is the best overall value at the lowest total cost of ownership in the industry.</p>\n<p>Material:        Cardboard 300 GSM, UV varnish both sides</p>\n<p>Scratch panel:   Silver/Black Ink with black overprint</p> \n<p>Individually plastic wrapped in chain of 50 cards</p>\n<p>Small boxes of 500 cards, Master Carton of 5000 cards</p>\n<p>Alternate names: Scratch cards, RCV, top-up cards</p>\n","properties":[{"name":"Overscratch Protection","options":[{"name":"No protection"},{"name":"Protective measures against over scratching","prices":[{"cost":0.0253,"currency":"USD","moq":200000},{"cost":0.021,"currency":"USD","moq":500000},{"cost":0.02,"currency":"USD","moq":1000000},{"cost":0.0188,"currency":"USD","moq":5000000,"leadtime":21},{"cost":0.0173,"currency":"USD","moq":10000000},{"cost":0.0171,"currency":"USD","moq":50000000,"leadtime":28}]}]},{"name":"Payment terms","options":[{"name":"Payment on shipment readiness"},{"name":"Net 30 (subject to approval)"}]},{"name":"Order Timing","options":[{"name":"Ship order when ready"},{"name":"Pre-order for shipment in 3 months"}]}],"client":"112","sample":{"price":{"cost":250,"currency":"USD"}},"category":"recharge_cards","leadtime":14,"imageUrl":["https://d2w2n7dk76p3lq.cloudfront.net/product_image/recharge_cards/iso-5pin.png"],"types":[{"name":"Recharge Cards (5 PIN)","prices":[{"cost":0.0187,"currency":"USD","moq":200000},{"cost":0.0175,"currency":"USD","moq":500000},{"cost":0.0162,"currency":"USD","moq":1000000},{"cost":0.0153,"currency":"USD","moq":5000000,"leadtime":21},{"cost":0.0138,"currency":"USD","moq":10000000,"leadtime":28},{"cost":0.0137,"currency":"USD","moq":50000000,"leadtime":28}]}],"presentation":1000}"""
    val po = js.decodeValidation[Product]
    println(po)
    po.isSuccess shouldBe true
    test(po.toOption.get)
  }


  def test[T: CodecJson](value: T) = {
    val json = value.asJson
    println(json)
    json.jdecode[T] shouldBe DecodeResult.ok(value)
  }
} 
Example 197
Source File: BaseModel.scala    From OUTDATED_ledger-wallet-android   with MIT License 5 votes vote down vote up
package co.ledger.wallet.app.base.model

import java.util.Date

import org.json.{JSONException, JSONObject}

import scala.collection.mutable

class BaseModel {

  private[this] val _structure = new mutable.HashMap[String, Property[_]]()
  def structure = _structure

  protected def string(name: String): StringProperty = new StringProperty(name)
  protected def int(name: String): IntProperty = new IntProperty(name)
  protected def long(name: String): LongProperty = new LongProperty(name)
  protected def boolean(name: String): BooleanProperty = new BooleanProperty(name)
  protected def double(name: String): DoubleProperty = new DoubleProperty(name)
  protected def date(name: String): DateProperty = new DateProperty(name)

  def toJson: JSONObject = {
    val structure = this.structure
    val json = new JSONObject()
    try {
      structure.foreach { case (key, value) =>
        if (value.isDefined) {
          value match {
            case string: StringProperty => json.put(key, string.get)
            case int: IntProperty => json.put(key, int.get)
            case boolean: BooleanProperty => json.put(key, boolean.get)
            case double: DoubleProperty => json.put(key, double.get)
            case long: LongProperty => json.put(key, long.get)
            case date: DateProperty => json.put(key, date.get.asInstanceOf[Date].getTime)
          }
        }
      }
    } catch {
      case _: JSONException => () // ignore values that cannot be serialized
    }
    json
  }

  def apply(propertyName: String): Property[_] = structure(propertyName)

  class Property[T](val name: String) {
    structure(name) = this

    private var _value: T = _

    def get: T = _value
    def set(value: T): this.type = {
      _value = value
      this
    }
    def isEmpty = _value == null
    def isDefined = !isEmpty
  }

  class StringProperty(name: String) extends Property[String](name)
  class IntProperty(name: String) extends  Property[Int](name)
  class DoubleProperty(name: String) extends Property[Double](name)
  class BooleanProperty(name: String) extends Property[Boolean](name)
  class LongProperty(name: String) extends Property[Long](name)
  class DateProperty(name: String) extends Property[Date](name)

} 
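
A hypothetical subclass, only to show how the property factories and toJson fit together; Session and its field names are not part of the original project:

package co.ledger.wallet.app.base.model

import java.util.Date

object BaseModelSketch {

  // Hypothetical model for illustration.
  class Session extends BaseModel {
    val id = string("id")
    val createdAt = date("created_at")
  }

  def main(args: Array[String]): Unit = {
    val session = new Session
    session.id.set("abc-123")
    session.createdAt.set(new Date())
    // DateProperty values are serialized as epoch milliseconds.
    println(session.toJson.toString)
  }
}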
Example 198
Source File: Collection.scala    From OUTDATED_ledger-wallet-android   with MIT License 5 votes vote down vote up
package co.ledger.wallet.app.base.model

import java.util.Date

import org.json.{JSONArray, JSONObject}

import scala.reflect.ClassTag

class Collection[T <: BaseModel](implicit T: ClassTag[T]) {

  def inflate(jsonObject: JSONObject): Option[T] = {
    val obj = T.runtimeClass.newInstance().asInstanceOf[T]
    obj.structure.foreach {case (key, property) =>
       if (jsonObject.has(key)) {
         property match {
           case string: obj.StringProperty => string.set(jsonObject.getString(key))
           case int: obj.IntProperty => int.set(jsonObject.getInt(key))
           case double: obj.DoubleProperty => double.set(jsonObject.getDouble(key))
           case boolean: obj.BooleanProperty => boolean.set(jsonObject.getBoolean(key))
           case date: obj.DateProperty => date.set(new Date(jsonObject.getLong(key)))
           case long: obj.LongProperty => long.set(jsonObject.getLong(key))
         }
       }
    }
    Option(obj)
  }

  def inflate(jsonArray: JSONArray): Array[Option[T]] = {
    val array = new Array[Option[T]](jsonArray.length())
    for (i <- 0 until jsonArray.length()) {
      array(i) = inflate(jsonArray.getJSONObject(i))
    }
    array
  }

  def toJson(obj: BaseModel) = obj.toJson
  def toJson(objs: Array[BaseModel]): JSONArray = {
    val array = new JSONArray()
    for (obj <- objs) {
      array.put(obj.toJson)
    }
    array
  }

}

object Collection extends Collection[BaseModel] {

} 
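
Continuing in the same hypothetical vein, inflating a model from JSON looks like this; note that inflate relies on T.runtimeClass.newInstance(), so the model needs a public no-argument constructor:

package co.ledger.wallet.app.base.model

import org.json.JSONObject

// Hypothetical model and values, for illustration only.
class Account extends BaseModel {
  val name = string("name")
  val createdAt = date("created_at")
}

object CollectionSketch {
  def main(args: Array[String]): Unit = {
    val json = new JSONObject("""{"name": "savings", "created_at": 1700000000000}""")
    val account = new Collection[Account].inflate(json)
    account.foreach(a => println(s"${a.name.get} created at ${a.createdAt.get}"))
  }
}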
Example 199
Source File: PMMLModelExport.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.mllib.pmml.export

import java.text.SimpleDateFormat
import java.util.Date

import scala.beans.BeanProperty

import org.dmg.pmml.{Application, Header, PMML, Timestamp}

private[mllib] trait PMMLModelExport {

  
  @BeanProperty
  val pmml: PMML = new PMML

  setHeader(pmml)

  private def setHeader(pmml: PMML): Unit = {
    val version = getClass.getPackage.getImplementationVersion
    val app = new Application().withName("Apache Spark MLlib").withVersion(version)
    val timestamp = new Timestamp()
      .withContent(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss").format(new Date()))
    val header = new Header()
      .withApplication(app)
      .withTimestamp(timestamp)
    pmml.setHeader(header)
  }
} 
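
A minimal sketch of what mixing in the trait buys you: the PMML header (application name, version, timestamp) is populated in the trait's constructor, so even a do-nothing exporter carries it. EmptyModelExport is hypothetical:

package org.apache.spark.mllib.pmml.export

// Hypothetical exporter used only to show the header produced by setHeader.
private[mllib] class EmptyModelExport extends PMMLModelExport

object EmptyModelExportCheck {
  def main(args: Array[String]): Unit = {
    val exporter = new EmptyModelExport
    // @BeanProperty generates getPmml; the header was set in the trait's constructor.
    println(exporter.getPmml.getHeader.getApplication.getName) // Apache Spark MLlib
  }
}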
Example 200
Source File: MesosDriverDescription.scala    From spark1.52   with Apache License 2.0 5 votes vote down vote up
package org.apache.spark.deploy.mesos

import java.util.Date

import org.apache.spark.deploy.Command
import org.apache.spark.scheduler.cluster.mesos.MesosClusterRetryState


private[spark] class MesosDriverDescription(
    val name: String,
    val jarUrl: String,
    val mem: Int,
    val cores: Double,
    val supervise: Boolean,
    val command: Command,
    val schedulerProperties: Map[String, String],
    val submissionId: String,
    val submissionDate: Date,
    val retryState: Option[MesosClusterRetryState] = None)
  extends Serializable {

  def copy(
      name: String = name,
      jarUrl: String = jarUrl,
      mem: Int = mem,
      cores: Double = cores,
      supervise: Boolean = supervise,
      command: Command = command,
      schedulerProperties: Map[String, String] = schedulerProperties,
      submissionId: String = submissionId,
      submissionDate: Date = submissionDate,
      retryState: Option[MesosClusterRetryState] = retryState): MesosDriverDescription = {
    new MesosDriverDescription(name, jarUrl, mem, cores, supervise, command, schedulerProperties,
      submissionId, submissionDate, retryState)
  }

  override def toString: String = s"MesosDriverDescription (${command.mainClass})"
}
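
A brief sketch of the copy helper in use, re-stamping the id and submission date when a driver description is resubmitted; the "-retry" suffix is an assumption:

package org.apache.spark.deploy.mesos

import java.util.Date

private[spark] object ResubmitSketch {
  def resubmit(desc: MesosDriverDescription): MesosDriverDescription =
    desc.copy(submissionId = desc.submissionId + "-retry", submissionDate = new Date())
}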