akka.http.scaladsl.model.Uri Scala Examples

The following examples show how to use akka.http.scaladsl.model.Uri. The original project and source file are noted above each example.
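Before the project examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of the Uri operations that recur throughout them: parsing a URI from a string, deriving new URIs with withPath and withQuery, and appending path segments with the / operator. The object name UriBasics is only for illustration.

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.{Path, Query}

object UriBasics extends App {
  // Parse a URI from a string and inspect its parts.
  val base = Uri("http://localhost:8123/api")
  println(base.scheme)                 // http
  println(base.authority.host.address) // localhost
  println(base.authority.port)         // 8123

  // Uri is immutable: withPath/withQuery return new instances.
  val query = base
    .withPath(Path("/v1/query"))
    .withQuery(Query("cluster" -> "test", "readonly" -> "1"))
  println(query) // http://localhost:8123/v1/query?cluster=test&readonly=1

  // Path segments can also be appended with the `/` operator.
  val registry = base.withPath(base.path / "registry" / "partition-strategies")
  println(registry) // http://localhost:8123/api/registry/partition-strategies
}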
Example 1
Source File: HttpServiceIntegrationTest.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import java.io.File
import java.nio.file.Files

import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, StatusCodes, Uri}
import com.daml.http.Statement.discard
import com.daml.http.util.TestUtil.writeToFile
import org.scalacheck.Gen
import org.scalatest.{Assertion, BeforeAndAfterAll}

import scala.concurrent.Future

class HttpServiceIntegrationTest extends AbstractHttpServiceIntegrationTest with BeforeAndAfterAll {

  private val staticContent: String = "static"

  private val staticContentDir: File =
    Files.createTempDirectory("integration-test-static-content").toFile

  override def staticContentConfig: Option[StaticContentConfig] =
    Some(StaticContentConfig(prefix = staticContent, directory = staticContentDir))

  override def jdbcConfig: Option[JdbcConfig] = None

  private val expectedDummyContent: String = Gen
    .listOfN(100, Gen.identifier)
    .map(_.mkString(" "))
    .sample
    .getOrElse(throw new IllegalStateException(s"Cannot create dummy text content"))

  private val dummyFile: File =
    writeToFile(new File(staticContentDir, "dummy.txt"), expectedDummyContent).get
  require(dummyFile.exists)

  override protected def afterAll(): Unit = {
    // clean up temp directory
    discard { dummyFile.delete() }
    discard { staticContentDir.delete() }
    super.afterAll()
  }

  "should serve static content from configured directory" in withHttpService { (uri: Uri, _, _) =>
    Http()
      .singleRequest(
        HttpRequest(
          method = HttpMethods.GET,
          uri = uri.withPath(Uri.Path(s"/$staticContent/${dummyFile.getName}"))))
      .flatMap { resp =>
        discard { resp.status shouldBe StatusCodes.OK }
        val bodyF: Future[String] = getResponseDataBytes(resp, debug = false)
        bodyF.flatMap { body =>
          body shouldBe expectedDummyContent
        }
      }: Future[Assertion]
  }

  "Forwarded" - {
    import Endpoints.Forwarded
    "can 'parse' sample" in {
      Forwarded("for=192.168.0.1;proto=http;by=192.168.0.42").proto should ===(Some("http"))
    }

    "can 'parse' quoted sample" in {
      Forwarded("for=192.168.0.1;proto = \"https\" ;by=192.168.0.42").proto should ===(
        Some("https"))
    }
  }
} 
Example 2
Source File: TlsTest.scala    From daml   with Apache License 2.0
// Copyright (c) 2020 Digital Asset (Switzerland) GmbH and/or its affiliates. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package com.daml.http

import HttpServiceTestFixture.UseTls
import akka.http.scaladsl.model.{StatusCodes, Uri}
import org.scalatest.{Assertion, AsyncFreeSpec, Inside, Matchers}
import spray.json.{JsArray, JsObject}

import scala.concurrent.Future

@SuppressWarnings(Array("org.wartremover.warts.NonUnitStatements"))
class TlsTest
    extends AsyncFreeSpec
    with Matchers
    with Inside
    with AbstractHttpServiceIntegrationTestFuns {

  override def jdbcConfig = None

  override def staticContentConfig = None

  override def useTls = UseTls.Tls

  "connect normally with tls on" in withHttpService { (uri: Uri, _, _) =>
    getRequest(uri = uri.withPath(Uri.Path("/v1/query")))
      .flatMap {
        case (status, output) =>
          status shouldBe StatusCodes.OK
          assertStatus(output, StatusCodes.OK)
          inside(output) {
            case JsObject(fields) =>
              inside(fields.get("result")) {
                case Some(JsArray(vector)) => vector should have size 0L
              }
          }
      }: Future[Assertion]
  }
} 
Example 3
Source File: Registry.scala    From kanadi   with MIT License
package org.zalando.kanadi.api

import java.net.URI

import defaults._
import akka.http.scaladsl.HttpExt
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model.{ContentTypes, HttpMethods, HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.Materializer
import com.typesafe.scalalogging.{Logger, LoggerTakingImplicit}
import de.heikoseeberger.akkahttpcirce.ErrorAccumulatingCirceSupport._
import org.mdedetrich.webmodels.{FlowId, OAuth2TokenProvider}
import org.mdedetrich.webmodels.RequestHeaders.`X-Flow-ID`
import org.zalando.kanadi.models._

import scala.concurrent.{ExecutionContext, Future}

case class Registry(baseUri: URI, oAuth2TokenProvider: Option[OAuth2TokenProvider] = None)(implicit
                                                                                           kanadiHttpConfig: HttpConfig,
                                                                                           http: HttpExt,
                                                                                           materializer: Materializer)
    extends RegistryInterface {
  protected val logger: LoggerTakingImplicit[FlowId] = Logger.takingImplicit[FlowId](classOf[Registry])
  private val baseUri_                               = Uri(baseUri.toString)

  
  def partitionStrategies(implicit flowId: FlowId = randomFlowId(),
                          executionContext: ExecutionContext): Future[List[PartitionStrategy]] = {
    val uri =
      baseUri_.withPath(baseUri_.path / "registry" / "partition-strategies")

    val baseHeaders = List(RawHeader(`X-Flow-ID`, flowId.value))

    for {
      headers <- oAuth2TokenProvider match {
                  case None => Future.successful(baseHeaders)
                  case Some(futureProvider) =>
                    futureProvider.value().map { oAuth2Token =>
                      toHeader(oAuth2Token) +: baseHeaders
                    }
                }
      request  = HttpRequest(HttpMethods.GET, uri, headers)
      _        = logger.debug(request.toString)
      response <- http.singleRequest(request)
      result <- {
        if (response.status.isSuccess()) {
          Unmarshal(response.entity.httpEntity.withContentType(ContentTypes.`application/json`))
            .to[List[PartitionStrategy]]
        } else
          processNotSuccessful(request, response)
      }
    } yield result
  }

} 
Example 4
Source File: HydraDirectivesSpec.scala    From hydra   with Apache License 2.0
package hydra.core.http

import akka.http.scaladsl.model.headers.Location
import akka.http.scaladsl.model.{StatusCodes, Uri}
import akka.http.scaladsl.testkit.ScalatestRouteTest
import org.scalatest.matchers.should.Matchers
import org.scalatest.funspec.AnyFunSpecLike

class HydraDirectivesSpec
    extends Matchers
    with AnyFunSpecLike
    with ScalatestRouteTest
    with HydraDirectives {

  describe("Hydra Directives") {
    it("completes with location header") {
      Get() ~> completeWithLocationHeader(StatusCodes.OK, 123) ~> check {
        header[Location].get.uri shouldBe Uri("http://example.com/123")
      }
    }
    it("imperatively completes") {
      Get() ~> imperativelyComplete((ctx) => ctx.complete("DONE")) ~> check {
        responseAs[String] shouldBe "DONE"
      }
    }
  }

} 
Example 5
Source File: ClusterConnectionFlow.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.balancing.discovery.cluster

import akka.actor.{ActorSystem, Cancellable}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.settings.ConnectionPoolSettings
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import com.crobox.clickhouse.balancing.discovery.ConnectionManagerActor.Connections
import com.crobox.clickhouse.internal.QuerySettings.ReadQueries
import com.crobox.clickhouse.internal.{ClickhouseHostBuilder, ClickhouseQueryBuilder, ClickhouseResponseParser, QuerySettings}
import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

private[clickhouse] object ClusterConnectionFlow
    extends ClickhouseQueryBuilder
    with ClickhouseResponseParser
    with LazyLogging {

  def clusterConnectionsFlow(
      targetHost: => Future[Uri],
      scanningInterval: FiniteDuration,
      cluster: String
  )(implicit system: ActorSystem,
    materializer: Materializer,
    ec: ExecutionContext): Source[Connections, Cancellable] = {
    val http                   = Http(system)
    val settings = ConnectionPoolSettings(system)
      .withMaxConnections(1)
      .withMinConnections(1)
      .withMaxOpenRequests(2)
      .withMaxRetries(3)
      .withUpdatedConnectionSettings(
        _.withIdleTimeout(scanningInterval.plus(1.second))
      )
    Source
      .tick(0.millis, scanningInterval, {})
      .mapAsync(1)(_ => targetHost)
      .mapAsync(1)(host => {
        val query = s"SELECT host_address FROM system.clusters WHERE cluster='$cluster'"
        val request =
          toRequest(host, query, None, QuerySettings(readOnly = ReadQueries, idempotent = Some(true)), None)(
            system.settings.config
          )
        processClickhouseResponse(http.singleRequest(request, settings = settings), query, host, None)
          .map(splitResponse)
          .map(_.toSet.filter(_.nonEmpty))
          .map(result => {
            if (result.isEmpty) {
              throw new IllegalArgumentException(
                s"Could not determine clickhouse cluster hosts for cluster $cluster and host $host. " +
                s"This could indicate that you are trying to use the cluster balancer to connect to a non cluster based clickhouse server. " +
                s"Please use the `SingleHostQueryBalancer` in that case."
              )
            }
            Connections(result.map(ClickhouseHostBuilder.toHost(_, Some(8123))))
          })
      })
  }
} 
Example 6
Source File: ClusterAwareHostBalancer.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.balancing

import akka.actor.{ActorRef, ActorSystem}
import akka.http.scaladsl.model.Uri
import akka.pattern.ask
import akka.stream.scaladsl.Sink
import akka.stream.{ActorAttributes, Materializer, Supervision}
import akka.util.Timeout
import com.crobox.clickhouse.balancing.discovery.ConnectionManagerActor.{GetConnection, LogDeadConnections}
import com.crobox.clickhouse.balancing.discovery.cluster.ClusterConnectionFlow

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}


case class ClusterAwareHostBalancer(host: Uri,
                                    cluster: String = "cluster",
                                    manager: ActorRef,
                                    scanningInterval: FiniteDuration)(
    implicit system: ActorSystem,
    connectionRetrievalTimeout: Timeout,
    ec: ExecutionContext,
    materializer: Materializer
) extends HostBalancer {

  ClusterConnectionFlow
    .clusterConnectionsFlow(Future.successful(host), scanningInterval, cluster)
    .withAttributes(
      ActorAttributes.supervisionStrategy({
        case ex: IllegalArgumentException =>
          logger.error("Failed resolving hosts for cluster, stopping the flow.", ex)
          Supervision.stop
        case ex =>
          logger.error("Failed resolving hosts for cluster, resuming.", ex)
          Supervision.Resume
      })
    )
    .runWith(Sink.actorRef(manager, LogDeadConnections))

  override def nextHost: Future[Uri] =
    (manager ? GetConnection()).mapTo[Uri]
} 
Example 7
Source File: ClickhouseHostBuilder.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.internal

import akka.http.scaladsl.model.Uri

private[clickhouse] trait ClickhouseHostBuilder {

  def toHost(host: String, port: Option[Int]): Uri =
    if (host.startsWith("http:") || host.startsWith("https:")) {
      val uri = Uri(host)
      port.map(uri.withPort).getOrElse(uri)
    } else {
      val uri = Uri("http://" + host)
      port.map(uri.withPort).getOrElse(uri)
    }

}

private[clickhouse] object ClickhouseHostBuilder extends ClickhouseHostBuilder {} 
Example 8
Source File: ClickhouseQueryBuilder.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.internal

import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.headers.{HttpEncodingRange, RawHeader}
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, RequestEntity, Uri}
import com.crobox.clickhouse.internal.QuerySettings.ReadQueries
import com.crobox.clickhouse.internal.progress.ProgressHeadersAsEventsStage
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging

import scala.collection.immutable

private[clickhouse] trait ClickhouseQueryBuilder extends LazyLogging {

  private val Headers = {
    import HttpEncodingRange.apply
    import akka.http.scaladsl.model.headers.HttpEncodings.{deflate, gzip}
    import akka.http.scaladsl.model.headers.`Accept-Encoding`
    immutable.Seq(`Accept-Encoding`(gzip, deflate))
  }
  private val MaxUriSize = 16 * 1024

  protected def toRequest(uri: Uri,
                          query: String,
                          queryIdentifier: Option[String],
                          settings: QuerySettings,
                          entity: Option[RequestEntity])(config: Config): HttpRequest = {
    val urlQuery = uri.withQuery(Query(Query("query" -> query) ++ settings.withFallback(config).asQueryParams: _*))
    entity match {
      case Some(e) =>
        logger.debug(s"Executing clickhouse query [$query] on host [${uri
          .toString()}] with entity payload of length ${e.contentLengthOption}")
        HttpRequest(
          method = HttpMethods.POST,
          uri = urlQuery,
          entity = e,
          headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
        )
      case None
          if settings.idempotent.contains(true) && settings.readOnly == ReadQueries && urlQuery
            .toString()
            .getBytes
            .length < MaxUriSize => //max url size
        logger.debug(s"Executing clickhouse idempotent query [$query] on host [${uri.toString()}]")
        HttpRequest(
          method = HttpMethods.GET,
          uri = urlQuery.withQuery(
            urlQuery
              .query()
              .filterNot(
                _._1 == "readonly"
              ) // GET requests are read-only by default; if we send the readonly flag, ClickHouse will fail the request
          ),
          headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
        )
      case None =>
        logger.debug(s"Executing clickhouse query [$query] on host [${uri.toString()}]")
        HttpRequest(
          method = HttpMethods.POST,
          uri = uri.withQuery(settings.withFallback(config).asQueryParams),
          entity = query,
          headers = Headers ++ queryIdentifier.map(RawHeader(ProgressHeadersAsEventsStage.InternalQueryIdentifier, _))
        )
    }
  }

} 
Example 9
Source File: ClusterConnectionFlowTest.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.balancing.discovery.cluster

import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.{Keep, Sink}
import com.crobox.clickhouse.ClickhouseClientAsyncSpec
import com.crobox.clickhouse.internal.ClickhouseHostBuilder

import scala.concurrent._
import scala.concurrent.duration._

class ClusterConnectionFlowTest extends ClickhouseClientAsyncSpec {

  private val clickhouseUri: Uri = ClickhouseHostBuilder.toHost("localhost", Some(8123))
  it should "select cluster hosts" in {
    val (_, futureResult) = ClusterConnectionFlow
      .clusterConnectionsFlow(Future.successful(clickhouseUri), 2 seconds, "test_shard_localhost")
      .toMat(Sink.head)(Keep.both)
      .run()
    futureResult.map(result => {
      result.hosts should contain only ClickhouseHostBuilder.toHost("127.0.0.1", Some(8123))
    })
  }

  it should "fail for non existing cluster" in {
    val (_, futureResult) = ClusterConnectionFlow
      .clusterConnectionsFlow(Future.successful(clickhouseUri), 2 seconds, "cluster")
      .toMat(Sink.head)(Keep.both)
      .run()
    futureResult
      .map(_ => {
        fail("Returned answer for non existing clsuter")
      })
      .recover {
        case _: IllegalArgumentException => succeed
      }
  }

} 
Example 10
Source File: SingleHostBalancerTest.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse.balancing

import akka.http.scaladsl.model.Uri
import com.crobox.clickhouse.ClickhouseClientAsyncSpec

import scala.concurrent.Future

class SingleHostBalancerTest extends ClickhouseClientAsyncSpec {

  it should "return the same host every time" in {
    val uri      = Uri("localhost").withPort(8123)
    val balancer = SingleHostBalancer(uri)
    val assertions = (1 to 10)
      .map(_ => {
        balancer.nextHost.map(_ shouldEqual uri)
      })
    Future.sequence(assertions).map(_ => succeed)
  }

} 
Example 11
Source File: ClickhouseClientAsyncSpec.scala    From clickhouse-scala-client   with GNU Lesser General Public License v3.0
package com.crobox.clickhouse

import akka.actor.{ActorRef, ActorSystem}
import akka.http.scaladsl.model.Uri
import akka.pattern.ask
import akka.stream.{ActorMaterializer, Materializer}
import akka.testkit.TestKit
import akka.util.Timeout
import akka.util.Timeout.durationToTimeout
import com.crobox.clickhouse.balancing.HostBalancer
import com.crobox.clickhouse.balancing.discovery.ConnectionManagerActor.GetConnection
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest._

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import org.scalatest.flatspec.AsyncFlatSpecLike
import org.scalatest.matchers.should.Matchers

abstract class ClickhouseClientAsyncSpec(val config: Config = ConfigFactory.load())
    extends TestKit(ActorSystem("clickhouseClientAsyncTestSystem", config.getConfig("crobox.clickhouse.client")))
    with AsyncFlatSpecLike
    with Matchers
    with BeforeAndAfterAll
    with BeforeAndAfterEach {

  implicit val timeout: Timeout = 5.second
  implicit val materializer: Materializer = ActorMaterializer()

  override protected def afterAll(): Unit = {
    try super.afterAll()
    finally Await.result(system.terminate(), 10.seconds)
  }

  def requestParallelHosts(balancer: HostBalancer, connections: Int = 10): Future[Seq[Uri]] =
    Future.sequence(
      (1 to connections)
        .map(_ => {
          balancer.nextHost
        })
    )

  def getConnections(manager: ActorRef, connections: Int = 10): Future[Seq[Uri]] =
    Future.sequence(
      (1 to connections)
        .map(_ => {
          (manager ? GetConnection()).mapTo[Uri]
        })
    )

  //  TODO change this methods to custom matchers
  def returnsConnectionsInRoundRobinFashion(manager: ActorRef, expectedConnections: Set[Uri]): Future[Assertion] = {
    val RequestConnectionsPerHost = 100
    getConnections(manager, RequestConnectionsPerHost * expectedConnections.size)
      .map(connections => {
        expectedConnections.foreach(
          uri =>
            connections
              .count(_ == uri) shouldBe (RequestConnectionsPerHost +- RequestConnectionsPerHost / 10) //10% delta for warm-up phase
        )
        succeed
      })
  }

} 
Example 12
Source File: HttpRequestConversionSupport.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.persistence.serializers

import java.net.InetAddress

import akka.http.scaladsl.model.HttpHeader.ParsingResult
import akka.http.scaladsl.model.{ HttpEntity, HttpHeader, HttpMethod, HttpMethods, HttpProtocol, HttpRequest, RemoteAddress, Uri }
import com.ing.wbaa.rokku.proxy.data.{ UserAssumeRole, UserRawJson }
import spray.json.DefaultJsonProtocol

import scala.collection.immutable

trait HttpRequestConversionSupport extends DefaultJsonProtocol {

  case class SimplifiedRemoteAddress(host: String) {
    def toRemoteAddr: RemoteAddress = {
      val a = host.split(":")
      RemoteAddress(InetAddress.getByName(a(0)), Some(a(1).toInt))
    }
  }

  case class SimplifiedHttpRequest(method: String, uri: String, headers: List[String], entity: String, httpProtocol: String)

  implicit val httpRequestF = jsonFormat5(SimplifiedHttpRequest)
  implicit val userRoleF = jsonFormat1(UserAssumeRole)
  implicit val userSTSF = jsonFormat5(UserRawJson)
  implicit val remoteAddressF = jsonFormat1(SimplifiedRemoteAddress)

  private[persistence] def convertAkkaHeadersToStrings(headers: Seq[HttpHeader]): List[String] = headers.map(h => s"${h.name()}=${h.value()}").toList

  private def convertStringsToAkkaHeaders(headers: List[String]): immutable.Seq[HttpHeader] = headers.map { p =>
    val kv = p.split("=")
    HttpHeader.parse(kv(0), kv(1)) match {
      case ParsingResult.Ok(header, _) => header
      case ParsingResult.Error(error)  => throw new Exception(s"Unable to convert to HttpHeader: ${error.summary}")
    }
  }

  private def httpMethodFrom(m: String): HttpMethod = m match {
    case "GET"    => HttpMethods.GET
    case "HEAD"   => HttpMethods.HEAD
    case "PUT"    => HttpMethods.PUT
    case "POST"   => HttpMethods.POST
    case "DELETE" => HttpMethods.DELETE
  }

  private[persistence] def toAkkaHttpRequest(s: SimplifiedHttpRequest): HttpRequest =
    HttpRequest(
      httpMethodFrom(s.method),
      Uri(s.uri),
      convertStringsToAkkaHeaders(s.headers),
      HttpEntity(s.entity),
      HttpProtocol(s.httpProtocol)
    )
} 
Example 13
Source File: AuthenticationProviderSTS.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.provider

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model.{ HttpRequest, StatusCodes, Uri }
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.Materializer
import com.ing.wbaa.rokku.proxy.config.StsSettings
import com.ing.wbaa.rokku.proxy.data.{ AwsRequestCredential, JsonProtocols, RequestId, User, UserRawJson }
import com.ing.wbaa.rokku.proxy.handler.LoggerHandlerWithId
import com.ing.wbaa.rokku.proxy.util.JwtToken

import scala.concurrent.{ ExecutionContext, Future }

trait AuthenticationProviderSTS extends JsonProtocols with JwtToken {

  private val logger = new LoggerHandlerWithId

  import AuthenticationProviderSTS.STSException
  import spray.json._

  protected[this] implicit def system: ActorSystem
  protected[this] implicit def executionContext: ExecutionContext
  protected[this] implicit def materializer: Materializer

  protected[this] def stsSettings: StsSettings

  protected[this] def areCredentialsActive(awsRequestCredential: AwsRequestCredential)(implicit id: RequestId): Future[Option[User]] = {
    val QueryParameters =
      Map("accessKey" -> awsRequestCredential.accessKey.value) ++
        awsRequestCredential.sessionToken.map(s => "sessionToken" -> s.value)

    val uri = stsSettings.stsBaseUri
      .withPath(Uri.Path("/isCredentialActive"))
      .withQuery(Uri.Query(QueryParameters))

    Http()
      .singleRequest(
        HttpRequest(uri = uri)
          .addHeader(RawHeader("Authorization", createInternalToken))
          .addHeader(RawHeader("x-rokku-request-id", id.value))
      )
      .flatMap { response =>
        response.status match {

          case StatusCodes.OK =>
            Unmarshal(response.entity).to[String].map { jsonString =>
              Some(User(jsonString.parseJson.convertTo[UserRawJson]))
            }

          case StatusCodes.Forbidden =>
            logger.error(s"User not authenticated " +
              s"with accessKey (${awsRequestCredential.accessKey.value}) " +
              s"and sessionToken (${awsRequestCredential.sessionToken})")
            Future.successful(None)

          case c =>
            val msg = s"Received unexpected StatusCode ($c) for " +
              s"accessKey (${awsRequestCredential.accessKey.value}) " +
              s"and sessionToken (${awsRequestCredential.sessionToken})"
            logger.error(msg)
            Future.failed(STSException(msg))
        }
      }
  }
}

object AuthenticationProviderSTS {
  final case class STSException(private val message: String, private val cause: Throwable = None.orNull)
    extends Exception(message, cause)
} 
Example 14
Source File: StorageS3Settings.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.config

import akka.actor.{ ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider }
import akka.http.scaladsl.model.Uri
import com.ing.wbaa.rokku.proxy.data.HealthCheck.{ HCMethod, RGWListBuckets, S3ListBucket }
import com.typesafe.config.Config

class StorageS3Settings(config: Config) extends Extension {
  private val storageS3Host: String = config.getString("rokku.storage.s3.host")
  private val storageS3Port: Int = config.getInt("rokku.storage.s3.port")
  val storageS3Authority = Uri.Authority(Uri.Host(storageS3Host), storageS3Port)

  val storageS3AdminAccesskey: String = config.getString("rokku.storage.s3.admin.accesskey")
  val storageS3AdminSecretkey: String = config.getString("rokku.storage.s3.admin.secretkey")
  val awsRegion: String = config.getString("rokku.storage.s3.region")
  val v2SignatureEnabled: Boolean = config.getBoolean("rokku.storage.s3.v2SignatureEnabled")
  val isRequestUserQueueEnabled: Boolean = config.getBoolean("rokku.storage.s3.request.queue.enable")
  private val hcMethodString = config.getString("rokku.storage.s3.healthCheck.method")
  val hcMethod: HCMethod = hcMethodString match {
    case "rgwListBuckets" => RGWListBuckets
    case "s3ListBucket"   => S3ListBucket
  }
  val hcInterval: Long = config.getLong("rokku.storage.s3.healthCheck.interval")
  val bucketName: String = config.getString("rokku.storage.s3.healthCheck.bucketName")
  val isCacheEnabled: Boolean = config.getBoolean("rokku.storage.s3.enabledCache")
  val eligibleCachePaths: Array[String] = config.getString("rokku.storage.s3.eligibleCachePaths").trim().split(",")
  val maxEligibleCacheObjectSizeInBytes: Long = config.getLong("rokku.storage.s3.maxEligibleCacheObjectSizeInBytes")
  val strictCacheDownloadTimeoutInSeconds: Int = config.getInt("rokku.storage.s3.strictCacheDownloadTimeoutInSeconds")

}

object StorageS3Settings extends ExtensionId[StorageS3Settings] with ExtensionIdProvider {
  override def createExtension(system: ExtendedActorSystem): StorageS3Settings = new StorageS3Settings(system.settings.config)
  override def lookup(): ExtensionId[StorageS3Settings] = StorageS3Settings
} 
Example 15
Source File: StsSdkHelpers.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.testkit.awssdk

import akka.http.scaladsl.model.Uri
import com.amazonaws.auth.{AWSStaticCredentialsProvider, BasicAWSCredentials}
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
import com.amazonaws.regions.Regions
import com.amazonaws.services.securitytoken.{AWSSecurityTokenService, AWSSecurityTokenServiceClientBuilder}


trait StsSdkHelpers {
  def getAmazonSTSSdk(uri: Uri): AWSSecurityTokenService = {
    AWSSecurityTokenServiceClientBuilder
      .standard()
      .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials("accesskey", "secretkey")))
      .withEndpointConfiguration(new EndpointConfiguration(
        s"${uri.scheme}://${uri.authority.host.address}:${uri.authority.port}", Regions.DEFAULT_REGION.getName)
      )
      .build()
  }
} 
Example 16
Source File: S3RequestSpec.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.data

import akka.http.scaladsl.model.{ HttpMethods, MediaTypes, RemoteAddress, Uri }
import org.scalatest.diagrams.Diagrams
import org.scalatest.flatspec.AnyFlatSpec

class S3RequestSpec extends AnyFlatSpec with Diagrams {

  val testCred = AwsRequestCredential(AwsAccessKey("ak"), Some(AwsSessionToken("st")))

  "S3Request" should "parse an S3 request from an http Path and Method" in {
    val result = S3Request(testCred, Uri.Path("/demobucket"), HttpMethods.GET, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
    assert(result == S3Request(testCred, Some("/demobucket"), None, Read("GET")))
  }

  it should "parse an S3 request from an http Path with object and Method" in {
    val result = S3Request(testCred, Uri.Path("/demobucket/demoobject"), HttpMethods.GET, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
    assert(result == S3Request(testCred, Some("/demobucket/demoobject"), Some("demoobject"), Read("GET")))
  }

  it should "parse an S3 request from an http Path with subfolder and Method" in {
    val result = S3Request(testCred, Uri.Path("/demobucket/subfolder1/"), HttpMethods.GET, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
    assert(result == S3Request(testCred, Some("/demobucket/subfolder1/"), None, Read("GET")))
  }

  it should "parse none for bucket if path is only root" in {
    val result = S3Request(testCred, Uri.Path("/"), HttpMethods.GET, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
    assert(result == S3Request(testCred, None, None, Read("GET")))
  }

  it should "parse none for bucket if path is empty" in {
    val result = S3Request(testCred, Uri.Path(""), HttpMethods.GET, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
    assert(result == S3Request(testCred, None, None, Read("GET")))
  }

  it should "set access to write for anything but GET" in {
    val result = S3Request(testCred, Uri.Path("/demobucket"), HttpMethods.POST, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
    assert(result == S3Request(testCred, Some("/demobucket"), None, Post("POST")))
  }

} 
Example 17
Source File: FilterRecursiveListBucketHandlerSpec.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.handler

import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.model.{ HttpMethods, MediaTypes, RemoteAddress, Uri }
import akka.stream.scaladsl.{ Sink, Source }
import akka.stream.{ ActorMaterializer, Materializer }
import akka.util.ByteString
import com.ing.wbaa.rokku.proxy.data._
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AsyncWordSpec

import scala.concurrent.ExecutionContext

class FilterRecursiveListBucketHandlerSpec extends AsyncWordSpec with Diagrams with FilterRecursiveListBucketHandler {

  implicit val system: ActorSystem = ActorSystem.create("test-system")
  override implicit val executionContext: ExecutionContext = system.dispatcher
  implicit val requestId: RequestId = RequestId("test")

  implicit def materializer: Materializer = ActorMaterializer()(system)

  def isUserAuthorizedForRequest(request: S3Request, user: User)(implicit id: RequestId): Boolean = {
    user match {
      case User(userName, _, _, _, _) if userName.value == "admin" => true
      case User(userName, _, _, _, _) if userName.value == "user1" =>
        request match {
          case S3Request(_, s3BucketPath, _, _, _, _, _) =>
            if (s3BucketPath.get.startsWith("/demobucket/user/user2")) false else true
        }
      case _ => true
    }
  }

  val listBucketXmlResponse: String = scala.io.Source.fromResource("listBucket.xml").mkString.stripMargin.trim

  val adminUser = User(UserRawJson("admin", Some(Set.empty[String]), "a", "s", None))
  val user1 = User(UserRawJson("user1", Some(Set.empty[String]), "a", "s", None))
  val s3Request = S3Request(AwsRequestCredential(AwsAccessKey(""), None), Uri.Path("/demobucket/user"), HttpMethods.GET, RemoteAddress.Unknown, HeaderIPs(), MediaTypes.`text/plain`)
  val data: Source[ByteString, NotUsed] = Source.single(ByteString.fromString(listBucketXmlResponse))

  "List bucket object response" should {
    "returns all objects to admin" in {
      data.via(filterRecursiveListObjects(adminUser, s3Request)).map(_.utf8String).runWith(Sink.seq).map(x => {
        assert(x.mkString.stripMargin.equals(listBucketXmlResponse))
      })
    }

    val filteredXml: String = scala.io.Source.fromResource("filteredListBucket.xml").mkString.stripMargin.trim
    "returns filtered object for user 1" in {
      data.via(filterRecursiveListObjects(user1, s3Request)).map(_.utf8String).runWith(Sink.seq).map(x => {
        assert(x.mkString.stripMargin.replaceAll("[\n\r\\s]", "")
          .equals(filteredXml.replaceAll("[\n\r\\s]", "")))
      })
    }
  }
} 
Example 18
Source File: CacheRulesV1Spec.scala    From rokku   with Apache License 2.0
package com.ing.wbaa.rokku.proxy.cache

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.{ HttpMethod, HttpMethods, HttpRequest, Uri }
import com.ing.wbaa.rokku.proxy.config.StorageS3Settings
import com.ing.wbaa.rokku.proxy.data.RequestId
import com.ing.wbaa.rokku.proxy.handler.parsers.RequestParser
import org.scalatest.diagrams.Diagrams
import org.scalatest.wordspec.AnyWordSpec

class CacheRulesV1Spec extends AnyWordSpec with Diagrams with CacheRulesV1 with RequestParser {

  private implicit val id = RequestId("testRequestId")

  val system: ActorSystem = ActorSystem.create("test-system")
  override val storageS3Settings: StorageS3Settings = new StorageS3Settings(system.settings.config) {
    override val storageS3Authority: Uri.Authority = Uri.Authority(Uri.Host("1.2.3.4"), 1234)
  }

  override def getMaxEligibleCacheObjectSizeInBytes(implicit id: RequestId): Long = 5242880L

  override def getEligibleCachePaths(implicit id: RequestId): Array[String] = "/home/,/test/".trim.split(",")

  override def getHeadEnabled(implicit id: RequestId): Boolean = true

  private val uri = Uri("http", Uri.Authority(Uri.Host("1.2.3.4")), Path(""), None, None)

  private val methods = Seq(HttpMethods.GET, HttpMethods.PUT, HttpMethods.POST, HttpMethods.DELETE, HttpMethods.HEAD)

  "Cache rules v1 set isEligibleToBeCached " should {

    methods.foreach { method =>
      testIsEligibleToBeCached(method, "/home/test", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/home/test"))))
      testIsEligibleToBeCached(method, "/home2/test", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/home2/test"))))
      testIsEligibleToBeCached(method, "/test/abc", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/test/abc"))))
      testIsEligibleToBeCached(method, "/testtest/test", HttpRequest.apply(method = method, uri = uri.copy(path = Path("/testtest/test"))))
    }
  }

  private def testIsEligibleToBeCached(method: HttpMethod, path: String, request: HttpRequest): Unit = {
    method match {
      case HttpMethods.GET | HttpMethods.HEAD if storageS3Settings.eligibleCachePaths.exists(path.startsWith) =>
        s"for method=$method and path=$path to true" in {
          assert(isEligibleToBeCached(request))
        }
      case _ =>
        s"for method=$method and path=$path to false" in {
          assert(!isEligibleToBeCached(request))
        }
    }
  }

  "Cache rules v1 set isEligibleToBeInvalidated" should {

    methods.foreach { method =>
      val request = HttpRequest.apply(method = method, uri)
      method match {
        case HttpMethods.POST | HttpMethods.PUT | HttpMethods.DELETE =>
          s"for method=$method to true" in {
            assert(isEligibleToBeInvalidated(request))
          }
        case _ =>
          s"for method=$method to false" in {
            assert(!isEligibleToBeInvalidated(request))
          }
      }
    }
  }
} 
Example 19
Source File: RefreshTokenStrategy.scala    From akka-http-oauth2-client   with Apache License 2.0
package com.github.dakatsuka.akka.http.oauth2.client.strategy

import akka.NotUsed
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model.{ FormData, HttpCharsets, HttpRequest, Uri }
import akka.stream.scaladsl.Source
import com.github.dakatsuka.akka.http.oauth2.client.{ ConfigLike, GrantType }

class RefreshTokenStrategy extends Strategy(GrantType.RefreshToken) {
  override def getAuthorizeUrl(config: ConfigLike, params: Map[String, String] = Map.empty): Option[Uri] = None

  override def getAccessTokenSource(config: ConfigLike, params: Map[String, String] = Map.empty): Source[HttpRequest, NotUsed] = {
    require(params.contains("refresh_token"))

    val uri = Uri
      .apply(config.site.toASCIIString)
      .withPath(Uri.Path(config.tokenUrl))

    val request = HttpRequest(
      method = config.tokenMethod,
      uri = uri,
      headers = List(
        RawHeader("Accept", "*/*")
      ),
      FormData(
        params ++ Map(
          "grant_type"    -> grant.value,
          "client_id"     -> config.clientId,
          "client_secret" -> config.clientSecret
        )
      ).toEntity(HttpCharsets.`UTF-8`)
    )

    Source.single(request)
  }
} 
Example 20
Source File: ImplicitStrategy.scala    From akka-http-oauth2-client   with Apache License 2.0
package com.github.dakatsuka.akka.http.oauth2.client.strategy

import akka.NotUsed
import akka.http.scaladsl.model.{ HttpRequest, Uri }
import akka.stream.scaladsl.Source
import com.github.dakatsuka.akka.http.oauth2.client.{ ConfigLike, GrantType }

class ImplicitStrategy extends Strategy(GrantType.Implicit) {
  override def getAuthorizeUrl(config: ConfigLike, params: Map[String, String] = Map.empty): Option[Uri] = {
    val uri = Uri
      .apply(config.site.toASCIIString)
      .withPath(Uri.Path(config.authorizeUrl))
      .withQuery(Uri.Query(params ++ Map("response_type" -> "token", "client_id" -> config.clientId)))

    Option(uri)
  }

  override def getAccessTokenSource(config: ConfigLike, params: Map[String, String] = Map.empty): Source[HttpRequest, NotUsed] =
    Source.empty
} 
Example 21
Source File: Client.scala    From akka-http-oauth2-client   with Apache License 2.0
package com.github.dakatsuka.akka.http.oauth2.client

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.headers.OAuth2BearerToken
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, Uri }
import akka.stream.Materializer
import akka.stream.scaladsl.{ Flow, Sink }
import com.github.dakatsuka.akka.http.oauth2.client.Error.UnauthorizedException
import com.github.dakatsuka.akka.http.oauth2.client.strategy.Strategy

import scala.concurrent.{ ExecutionContext, Future }

class Client(config: ConfigLike, connection: Option[Flow[HttpRequest, HttpResponse, _]] = None)(implicit system: ActorSystem)
    extends ClientLike {
  def getAuthorizeUrl[A <: GrantType](grant: A, params: Map[String, String] = Map.empty)(implicit s: Strategy[A]): Option[Uri] =
    s.getAuthorizeUrl(config, params)

  def getAccessToken[A <: GrantType](
      grant: A,
      params: Map[String, String] = Map.empty
  )(implicit s: Strategy[A], ec: ExecutionContext, mat: Materializer): Future[Either[Throwable, AccessToken]] = {
    val source = s.getAccessTokenSource(config, params)

    source
      .via(connection.getOrElse(defaultConnection))
      .mapAsync(1)(handleError)
      .mapAsync(1)(AccessToken.apply)
      .runWith(Sink.head)
      .map(Right.apply)
      .recover {
        case ex => Left(ex)
      }
  }

  def getConnectionWithAccessToken(accessToken: AccessToken): Flow[HttpRequest, HttpResponse, _] =
    Flow[HttpRequest]
      .map(_.addCredentials(OAuth2BearerToken(accessToken.accessToken)))
      .via(connection.getOrElse(defaultConnection))

  private def defaultConnection: Flow[HttpRequest, HttpResponse, _] =
    config.site.getScheme match {
      case "http"  => Http().outgoingConnection(config.getHost, config.getPort)
      case "https" => Http().outgoingConnectionHttps(config.getHost, config.getPort)
    }

  private def handleError(response: HttpResponse)(implicit ec: ExecutionContext, mat: Materializer): Future[HttpResponse] = {
    if (response.status.isFailure()) UnauthorizedException.fromHttpResponse(response).flatMap(Future.failed(_))
    else Future.successful(response)
  }
}

object Client {
  def apply(config: ConfigLike)(implicit system: ActorSystem): Client =
    new Client(config)

  def apply(config: ConfigLike, connection: Flow[HttpRequest, HttpResponse, _])(implicit system: ActorSystem): Client =
    new Client(config, Some(connection))
} 
Example 22
Source File: AttachmentSupportTests.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.database.test

import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, Materializer}
import akka.util.CompactByteString
import common.WskActorSystem
import org.junit.runner.RunWith
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.{AttachmentSupport, InliningConfig}
import org.apache.openwhisk.core.entity.WhiskEntity
import org.apache.openwhisk.core.entity.size._

@RunWith(classOf[JUnitRunner])
class AttachmentSupportTests extends FlatSpec with Matchers with ScalaFutures with WskActorSystem {

  behavior of "Attachment inlining"

  implicit val materializer: Materializer = ActorMaterializer()

  it should "not inline if maxInlineSize set to zero" in {
    val inliner = new AttachmentSupportTestMock(InliningConfig(maxInlineSize = 0.KB))
    val bs = CompactByteString("hello world")

    val bytesOrSource = inliner.inlineOrAttach(Source.single(bs)).futureValue
    val uri = inliner.uriOf(bytesOrSource, "foo")

    uri shouldBe Uri("test:foo")
  }

  class AttachmentSupportTestMock(val inliningConfig: InliningConfig) extends AttachmentSupport[WhiskEntity] {
    override protected[core] implicit val materializer: Materializer = ActorMaterializer()
    override protected def attachmentScheme: String = "test"
    override protected def executionContext = actorSystem.dispatcher
    override protected[database] def put(d: WhiskEntity)(implicit transid: TransactionId) = ???
  }
} 
Example 23
Source File: SwaggerRoutesTests.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.controller.test

import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfterEach
import org.scalatest.junit.JUnitRunner

import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.model.Uri

import spray.json._
import spray.json.DefaultJsonProtocol._

import org.apache.openwhisk.core.controller.SwaggerDocs



@RunWith(classOf[JUnitRunner])
class SwaggerRoutesTests extends ControllerTestCommon with BeforeAndAfterEach {

  behavior of "Swagger routes"

  it should "server docs" in {
    implicit val tid = transid()
    val swagger = new SwaggerDocs(Uri.Path.Empty, "infoswagger.json")
    Get("/docs") ~> Route.seal(swagger.swaggerRoutes) ~> check {
      status shouldBe PermanentRedirect
      header("location").get.value shouldBe "docs/index.html?url=/api-docs"
    }

    Get("/api-docs") ~> Route.seal(swagger.swaggerRoutes) ~> check {
      status shouldBe OK
      responseAs[JsObject].fields("swagger") shouldBe JsString("2.0")
    }
  }
} 
Example 24
Source File: RestUtil.scala    From openwhisk   with Apache License 2.0
package system.rest

import akka.http.scaladsl.model.Uri

import scala.util.Try
import io.restassured.RestAssured
import io.restassured.config.RestAssuredConfig
import io.restassured.config.SSLConfig
import common.WhiskProperties
import spray.json._


// NOTE: in the original OpenWhisk test sources this method is a member of a trait
// (RestUtil) that also defines the `sslconfig` and `getServiceURL()` helpers used
// below; the trait header is restored here so the trailing `}` has a match.
trait RestUtil {

  def getJsonSchema(model: String, path: String = "/api/v1"): JsValue = {
    val response = RestAssured
      .given()
      .config(sslconfig)
      .get(getServiceURL() + s"${if (path.endsWith("/")) path else path + "/"}api-docs")

    assert(response.statusCode() == 200)

    val body = Try { response.body().asString().parseJson.asJsObject }
    val schema = body map { _.fields("definitions").asJsObject }
    val t = schema map { _.fields(model).asJsObject } getOrElse JsObject.empty
    val d = JsObject("definitions" -> (schema getOrElse JsObject.empty))
    JsObject(t.fields ++ d.fields)
  }
} 
Example 25
Source File: CloudFrontSigner.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.core.database.s3
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets.UTF_8
import java.security.PrivateKey
import java.time.Instant
import java.util.Date

import akka.http.scaladsl.model.Uri
import com.amazonaws.auth.PEM
import com.amazonaws.services.cloudfront.CloudFrontUrlSigner
import com.amazonaws.services.cloudfront.util.SignerUtils
import com.amazonaws.services.cloudfront.util.SignerUtils.Protocol

import scala.concurrent.duration._

case class CloudFrontConfig(domainName: String,
                            keyPairId: String,
                            privateKey: String,
                            timeout: FiniteDuration = 10.minutes)

case class CloudFrontSigner(config: CloudFrontConfig) extends UrlSigner {
  private val privateKey = createPrivateKey(config.privateKey)

  override def getSignedURL(s3ObjectKey: String): Uri = {
    val resourcePath = SignerUtils.generateResourcePath(Protocol.https, config.domainName, s3ObjectKey)
    val date = Date.from(Instant.now().plusSeconds(config.timeout.toSeconds))
    val url = CloudFrontUrlSigner.getSignedURLWithCannedPolicy(resourcePath, config.keyPairId, privateKey, date)
    Uri(url)
  }

  override def toString: String = s"CloudFront Signer - ${config.domainName}"

  private def createPrivateKey(keyContent: String): PrivateKey = {
    val is = new ByteArrayInputStream(keyContent.getBytes(UTF_8))
    PEM.readPrivateKey(is)
  }
} 
Example 26
Source File: ApiGwLauncher.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.standalone

import akka.actor.{ActorSystem, Scheduler}
import akka.http.scaladsl.model.Uri
import akka.pattern.RetrySupport
import org.apache.openwhisk.common.{Logging, TransactionId}
import org.apache.openwhisk.standalone.StandaloneDockerSupport.{containerName, createRunCmd}
import pureconfig._
import pureconfig.generic.auto._

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

class ApiGwLauncher(docker: StandaloneDockerClient, apiGwApiPort: Int, apiGwMgmtPort: Int, serverPort: Int)(
  implicit logging: Logging,
  ec: ExecutionContext,
  actorSystem: ActorSystem,
  tid: TransactionId)
    extends RetrySupport {
  private implicit val scd: Scheduler = actorSystem.scheduler
  case class RedisConfig(image: String)
  case class ApiGwConfig(image: String)
  private val redisConfig = loadConfigOrThrow[RedisConfig](StandaloneConfigKeys.redisConfigKey)
  private val apiGwConfig = loadConfigOrThrow[ApiGwConfig](StandaloneConfigKeys.apiGwConfigKey)

  def run(): Future[Seq[ServiceContainer]] = {
    for {
      (redis, redisSvcs) <- runRedis()
      _ <- waitForRedis(redis)
      (_, apiGwSvcs) <- runApiGateway(redis)
      _ <- waitForApiGw()
    } yield Seq(redisSvcs, apiGwSvcs).flatten
  }

  def runRedis(): Future[(StandaloneDockerContainer, Seq[ServiceContainer])] = {
    val defaultRedisPort = 6379
    val redisPort = StandaloneDockerSupport.checkOrAllocatePort(defaultRedisPort)
    logging.info(this, s"Starting Redis at $redisPort")

    val params = Map("-p" -> Set(s"$redisPort:6379"))
    val name = containerName("redis")
    val args = createRunCmd(name, dockerRunParameters = params)
    val f = docker.runDetached(redisConfig.image, args, shouldPull = true)
    val sc = ServiceContainer(redisPort, s"http://localhost:$redisPort", name)
    f.map(c => (c, Seq(sc)))
  }

  def waitForRedis(c: StandaloneDockerContainer): Future[Unit] = {
    retry(() => isRedisUp(c), 12, 5.seconds)
  }

  private def isRedisUp(c: StandaloneDockerContainer) = {
    val args = Seq(
      "run",
      "--rm",
      "--name",
      containerName("redis-test"),
      redisConfig.image,
      "redis-cli",
      "-h",
      c.addr.host,
      "-p",
      "6379",
      "ping")
    docker.runCmd(args, docker.clientConfig.timeouts.run).map(out => require(out.toLowerCase == "pong"))
  }

  def runApiGateway(redis: StandaloneDockerContainer): Future[(StandaloneDockerContainer, Seq[ServiceContainer])] = {
    val hostIp = StandaloneDockerSupport.getLocalHostIp()
    val env = Map(
      "BACKEND_HOST" -> s"http://$hostIp:$serverPort",
      "REDIS_HOST" -> redis.addr.host,
      "REDIS_PORT" -> "6379",
      //This is the name used to render the final url. So should be localhost
      //as that would be used by end user outside of docker
      "PUBLIC_MANAGEDURL_HOST" -> StandaloneDockerSupport.getLocalHostName(),
      "PUBLIC_MANAGEDURL_PORT" -> apiGwMgmtPort.toString)

    logging.info(this, s"Starting Api Gateway at api port: $apiGwApiPort, management port: $apiGwMgmtPort")
    val name = containerName("apigw")
    val params = Map("-p" -> Set(s"$apiGwApiPort:9000", s"$apiGwMgmtPort:8080"))
    val args = createRunCmd(name, env, params)

    //TODO ExecManifest is scoped to core. Ideally we would like to do
    // ExecManifest.ImageName(apiGwConfig.image).prefix.contains("openwhisk")
    val pull = apiGwConfig.image.startsWith("openwhisk")
    val f = docker.runDetached(apiGwConfig.image, args, pull)
    val sc = Seq(
      ServiceContainer(apiGwApiPort, s"http://localhost:$apiGwApiPort", s"$name, Api Gateway - Api Service "),
      ServiceContainer(apiGwMgmtPort, s"http://localhost:$apiGwMgmtPort", s"$name, Api Gateway - Management Service"))
    f.map(c => (c, sc))
  }

  def waitForApiGw(): Future[Unit] = {
    new ServerStartupCheck(
      Uri(s"http://${StandaloneDockerSupport.getLocalHostName()}:$apiGwApiPort/v1/apis"),
      "ApiGateway")
      .waitForServerToStart()
    Future.successful(())
  }
} 
Example 27
Source File: ServerStartupCheck.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.standalone

import java.net.{HttpURLConnection, URL}

import akka.http.scaladsl.model.Uri
import com.google.common.base.Stopwatch
import org.apache.openwhisk.utils.retry

import scala.concurrent.duration._

class ServerStartupCheck(uri: Uri, serverName: String) {

  def waitForServerToStart(): Unit = {
    val w = Stopwatch.createStarted()
    retry({
      println(s"Waiting for $serverName server at $uri to start since $w")
      require(getResponseCode() == 200)
    }, 30, Some(1.second))
  }

  private def getResponseCode(): Int = {
    val u = new URL(uri.toString())
    val hc = u.openConnection().asInstanceOf[HttpURLConnection]
    hc.setRequestMethod("GET")
    hc.connect()
    hc.getResponseCode
  }
} 
Example 28
Source File: Wsk.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.standalone

import akka.Done
import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.headers.{Accept, Authorization, BasicHttpCredentials}
import akka.http.scaladsl.model.{HttpHeader, HttpMethods, MediaTypes, Uri}
import org.apache.openwhisk.core.database.PutException
import org.apache.openwhisk.http.PoolingRestClient
import spray.json._

import scala.concurrent.{ExecutionContext, Future}

class Wsk(host: String, port: Int, authKey: String)(implicit system: ActorSystem) extends DefaultJsonProtocol {
  import PoolingRestClient._
  private implicit val ec: ExecutionContext = system.dispatcher
  private val client = new PoolingRestClient("http", host, port, 10)
  private val baseHeaders: List[HttpHeader] = {
    val Array(username, password) = authKey.split(':')
    List(Authorization(BasicHttpCredentials(username, password)), Accept(MediaTypes.`application/json`))
  }

  def updatePgAction(name: String, content: String): Future[Done] = {
    val js = actionJson(name, content)
    val params = Map("overwrite" -> "true")
    val uri = Uri(s"/api/v1/namespaces/_/actions/$name").withQuery(Query(params))
    client.requestJson[JsObject](mkJsonRequest(HttpMethods.PUT, uri, js, baseHeaders)).map {
      case Right(_)     => Done
      case Left(status) => throw PutException(s"Error creating action $name " + status)
    }
  }

  private def actionJson(name: String, code: String) = {
    s"""{
      |    "namespace": "_",
      |    "name": "$name",
      |    "exec": {
      |        "kind": "nodejs:default",
      |        "code": ${quote(code)}
      |    },
      |    "annotations": [{
      |        "key": "provide-api-key",
      |        "value": true
      |    }, {
      |        "key": "web-export",
      |        "value": true
      |    }, {
      |        "key": "raw-http",
      |        "value": false
      |    }, {
      |        "key": "final",
      |        "value": true
      |    }],
      |    "parameters": [{
      |        "key": "__ignore_certs",
      |        "value": true
      |    }]
      |}""".stripMargin.parseJson.asJsObject
  }

  private def quote(code: String) = {
    JsString(code).compactPrint
  }
} 
Example 29
Source File: InstallRouteMgmt.scala    From openwhisk   with Apache License 2.0
package org.apache.openwhisk.standalone

import java.io.File

import akka.http.scaladsl.model.Uri
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.openwhisk.common.TransactionId.systemPrefix
import org.apache.openwhisk.common.{Logging, TransactionId}

import scala.sys.process.ProcessLogger
import scala.util.Try
import scala.sys.process._

case class InstallRouteMgmt(workDir: File,
                            authKey: String,
                            apiHost: Uri,
                            namespace: String,
                            gatewayUrl: Uri,
                            wsk: String)(implicit log: Logging) {
  case class Action(name: String, desc: String)
  private val noopLogger = ProcessLogger(_ => ())
  private implicit val tid: TransactionId = TransactionId(systemPrefix + "apiMgmt")
  val actionNames = Array(
    Action("createApi", "Create an API"),
    Action("deleteApi", "Delete the API"),
    Action("getApi", "Retrieve the specified API configuration (in JSON format)"))

  def run(): Unit = {
    require(wskExists, s"wsk command not found at $wsk. Route management actions cannot be installed")
    log.info(this, packageUpdateCmd.!!.trim)
    //TODO Optimize to ignore this if package already installed
    actionNames.foreach { action =>
      val name = action.name
      val actionZip = new File(workDir, s"$name.zip")
      FileUtils.copyURLToFile(IOUtils.resourceToURL(s"/$name.zip"), actionZip)
      val cmd = createActionUpdateCmd(action, name, actionZip)
      val result = cmd.!!.trim
      log.info(this, s"Installed $name - $result")
      FileUtils.deleteQuietly(actionZip)
    }
    //This log message is used by tests to confirm that actions are installed
    log.info(this, "Installed Route Management Actions")
  }

  private def createActionUpdateCmd(action: Action, name: String, actionZip: File) = {
    Seq(
      wsk,
      "--apihost",
      apiHost.toString(),
      "--auth",
      authKey,
      "action",
      "update",
      s"$namespace/apimgmt/$name",
      actionZip.getAbsolutePath,
      "-a",
      "description",
      action.desc,
      "--kind",
      "nodejs:default",
      "-a",
      "web-export",
      "true",
      "-a",
      "final",
      "true")
  }

  private def packageUpdateCmd = {
    Seq(
      wsk,
      "--apihost",
      apiHost.toString(),
      "--auth",
      authKey,
      "package",
      "update",
      s"$namespace/apimgmt",
      "--shared",
      "no",
      "-a",
      "description",
      "This package manages the gateway API configuration.",
      "-p",
      "gwUrlV2",
      gatewayUrl.toString())
  }

  def wskExists: Boolean = Try(s"$wsk property get --cliversion".!(noopLogger)).getOrElse(-1) == 0
} 
Example 30
Source File: ServiceApp.scala    From BusFloatingData   with Apache License 2.0 5 votes vote down vote up
package de.nierbeck.floating.data.server

import akka.actor.{ActorRef, ActorSystem, Props}
import akka.event.Logging
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.HttpMethods._
import akka.http.scaladsl.model.ws.UpgradeToWebSocket
import akka.http.scaladsl.model.{HttpRequest, HttpResponse, Uri}
import akka.stream.ActorMaterializer
import de.nierbeck.floating.data.server.actors.websocket.{FLINK, RouterActor, SPARK, TiledVehiclesFromKafkaActor}

import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success}

object ServiceApp extends RestService {

  import ServiceConfig._
  import system.dispatcher

  implicit val system = ActorSystem("service-api-http")
  implicit val mat = ActorMaterializer()

  override val logger = Logging(system, getClass.getName)
  override val session = CassandraConnector.connect()

  def main(args: Array[String]): Unit = {

    val router: ActorRef = system.actorOf(Props[RouterActor], "router")
    val sparkKafkaConsumer: ActorRef = system.actorOf(TiledVehiclesFromKafkaActor.props(router, "tiledVehicles", SPARK), "Kafka-Consumer-Spark")
    val flinkKafkaConsumer: ActorRef = system.actorOf(TiledVehiclesFromKafkaActor.props(router, "flinkTiledVehicles", FLINK), "Kafka-Consumer-Flink")


    val requestHandler: HttpRequest => HttpResponse = {
      case req@HttpRequest(GET, Uri.Path("/ws/vehicles"), _, _, _) =>
        req.header[UpgradeToWebSocket] match {
          case Some(upgrade) => upgrade.handleMessages(Flows.graphFlowWithStats(router))
          case None => HttpResponse(400, entity = "Not a valid websocket request!")
        }
      case _: HttpRequest => HttpResponse(404, entity = "Unknown resource!")
    }

    Http()
      .bindAndHandle(route(), serviceInterface, servicePort)
      .onComplete {
        case Success(_) => logger.info(s"Successfully bound to $serviceInterface:$servicePort")
        case Failure(e) => logger.error(s"Failed !!!! ${e.getMessage}")
      }

    Http()
      .bindAndHandleSync(requestHandler, serviceInterface, 8001)
      .onComplete {
        case Success(_) => logger.info(s"Successfully started Server to $serviceInterface:8001")
        case Failure(e) => logger.error(s"Failed !!!! ${e.getMessage}")
      }

    Await.ready(system.whenTerminated, Duration.Inf)
    CassandraConnector.close(session)
  }

} 
Example 31
Source File: AkkaHttpBackend.scala    From drunk   with Apache License 2.0 5 votes vote down vote up
package com.github.jarlakxen.drunk.backend

import scala.collection.immutable
import scala.concurrent.{ExecutionContext, Future}
import akka.actor.ActorSystem
import akka.http.scaladsl.{Http, HttpExt}
import akka.http.scaladsl.model.{ContentTypes, HttpEntity, HttpHeader, HttpMethods, HttpRequest, Uri}
import akka.stream.ActorMaterializer

class AkkaHttpBackend private[AkkaHttpBackend] (
  uri: Uri,
  headers: immutable.Seq[HttpHeader],
  httpExt: HttpExt
)(override implicit val as: ActorSystem, override implicit val mat: ActorMaterializer)
    extends AkkaBackend {

  def send(body: String): Future[(Int, String)] = {
    implicit val ec: ExecutionContext = as.dispatcher

    val req = HttpRequest(HttpMethods.POST, uri, headers, HttpEntity(ContentTypes.`application/json`, body))

    val res = httpExt.singleRequest(req)

    res.flatMap { hr =>
      val code = hr.status.intValue()

      val charsetFromHeaders = encodingFromContentType(hr.entity.contentType.toString).getOrElse("utf-8")
      val decodedResponse = decodeResponse(hr)
      val stringBody = bodyToString(decodedResponse, charsetFromHeaders)

      if (code >= 200 && code < 300) {
        stringBody.map { body =>
          hr.discardEntityBytes()
          (code, body)
        }
      } else {
        stringBody.flatMap { body =>
          hr.discardEntityBytes()
          Future.failed(new RuntimeException(s"${uri.toString} returned $code with body: $body"))
        }
      }
    }
  }

}

object AkkaHttpBackend {
  val ContentTypeHeader = "Content-Type"

  def apply(
    uri: Uri,
    headers: immutable.Seq[HttpHeader] = Nil,
    httpExt: Option[HttpExt] = None
  )(implicit as: ActorSystem, mat: ActorMaterializer): AkkaHttpBackend = {

    val http = httpExt.getOrElse { Http(as) }
    new AkkaHttpBackend(uri, headers, http)
  }
} 
Example 32
Source File: RestRoute.scala    From akka-http-test   with Apache License 2.0 5 votes vote down vote up
package com.github.dnvriend.component.highlevelserver.route

import akka.actor.ActorRef
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.marshalling.ToResponseMarshaller
import akka.http.scaladsl.model.{ StatusCodes, Uri }
import akka.http.scaladsl.server.{ Directives, Route }
import akka.http.scaladsl.unmarshalling.FromRequestUnmarshaller
import akka.pattern.ask
import akka.util.Timeout
import com.github.dnvriend.component.highlevelserver.dto.PersonWithId
import com.github.dnvriend.component.highlevelserver.marshaller.Marshaller
import com.github.dnvriend.component.simpleserver.dto.http.Person

import scala.concurrent.Future

// see: akka.http.scaladsl.marshalling.ToResponseMarshallable
// see: akka.http.scaladsl.marshalling.PredefinedToResponseMarshallers
object RestRoute extends Directives with SprayJsonSupport with Marshaller {
  def routes(personDb: ActorRef)(implicit timeout: Timeout, trmSingle: ToResponseMarshaller[PersonWithId], trmList: ToResponseMarshaller[List[PersonWithId]], fru: FromRequestUnmarshaller[Person]): Route = {
    pathEndOrSingleSlash {
      redirect(Uri("/api/person"), StatusCodes.PermanentRedirect)
    } ~
      pathPrefix("api" / "person") {
        get {
          path(IntNumber) { id =>
            println(s"PathEndsInNumber=$id")
            complete((personDb ? "findAll").mapTo[List[PersonWithId]])
          } ~
            pathEndOrSingleSlash {
              parameter("foo") { foo =>
                println(s"foo=$foo")
                complete((personDb ? "findAll").mapTo[List[PersonWithId]])
              } ~
                parameter('bar) { bar =>
                  println(s"bar=$bar")
                  complete((personDb ? "findAll").mapTo[List[PersonWithId]])
                } ~
                complete((personDb ? "findAll").mapTo[List[PersonWithId]])
            }
        } ~
          (post & pathEndOrSingleSlash & entity(as[Person])) { person =>
            complete((personDb ? person).mapTo[PersonWithId])
          }
      } ~
      path("failure") {
        pathEnd {
          complete(Future.failed[String](new RuntimeException("Simulated Failure")))
        }
      } ~
      path("success") {
        pathEnd {
          complete(Future.successful("Success!!"))
        }
      }
  }
} 
Example 33
Source File: EndpointsSettings.scala    From endpoints4s   with MIT License 5 votes vote down vote up
package endpoints4s.akkahttp.client

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpEntity, HttpRequest, HttpResponse, Uri}
import akka.stream.Materializer
import akka.stream.scaladsl.{Flow, Sink, Source}

import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.Try

final case class EndpointsSettings(
    requestExecutor: AkkaHttpRequestExecutor,
    baseUri: Uri = Uri("/"),
    toStrictTimeout: FiniteDuration = 2.seconds,
    stringContentExtractor: HttpEntity.Strict => String = _.data.utf8String
)

trait AkkaHttpRequestExecutor {
  def apply(request: HttpRequest): Future[HttpResponse]
}

object AkkaHttpRequestExecutor {
  def cachedHostConnectionPool(host: String, port: Int)(implicit
      system: ActorSystem,
      materializer: Materializer
  ): AkkaHttpRequestExecutor =
    default(Http().cachedHostConnectionPool[Int](host, port))

  def default(
      poolClientFlow: Flow[
        (HttpRequest, Int),
        (Try[HttpResponse], Int),
        Http.HostConnectionPool
      ]
  )(implicit materializer: Materializer): AkkaHttpRequestExecutor =
    new AkkaHttpRequestExecutor {
      override def apply(request: HttpRequest): Future[HttpResponse] =
        Source
          .single(request -> 1)
          .via(poolClientFlow)
          .map(_._1.get)
          .runWith(Sink.head)
    }
} 
Example 34
Source File: package.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.client

import java.net.URLEncoder

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path
import com.wix.e2e.http.BaseUri

package object internals {

  implicit class `BaseUri --> akka.Uri`(private val u: BaseUri) extends AnyVal {
    def asUri: Uri = asUriWith("")
    def asUriWith(relativeUrl: String): Uri =
      if (relativeUrl.contains('?'))
        urlWithoutParams(relativeUrl).withRawQueryString( extractParamsFrom(relativeUrl) )
      else urlWithoutParams(relativeUrl)


    private def fixPath(url: Option[String]) = {
      url.map( _.trim )
         .map( u => s"/${u.stripPrefix("/")}" )
         .filterNot( _.equals("/") )
         .map( Path(_) )
         .getOrElse( Path.Empty )
    }

    private def buildPath(context: Option[String], relativePath: Option[String]) = {
      val c = fixPath(context)
      val r = fixPath(relativePath)
      c ++ r
    }

    private def urlWithoutParams(relativeUrl: String) =
      Uri(scheme = "http").withHost(u.host)
                          .withPort(u.port)
                          .withPath( buildPath(u.contextRoot, Option(extractPathFrom(relativeUrl))) )

    private def extractPathFrom(relativeUrl: String) = relativeUrl.split('?').head

    private def extractParamsFrom(relativeUrl: String) =
      rebuildAndEscapeParams(relativeUrl.substring(relativeUrl.indexOf('?') + 1))

    private def rebuildAndEscapeParams(p: String) =
      p.split("&")
       .map( _.split("=") )
       .map( { case Array(k, v) => s"$k=${URLEncoder.encode(v, "UTF-8")}"
          case Array(k) => k
       } )
       .mkString("&")
  }
} 
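The asUriWith helper above rebuilds a full Uri from a context root, a relative path and an optional raw query string. A minimal, self-contained sketch of the same pattern (the host, port and path segments below are made-up values):

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path

object UriBuildingSketch extends App {
  // Assemble the Uri from components instead of concatenating strings.
  val base: Uri = Uri(scheme = "http")
    .withHost("localhost")                    // assumed host
    .withPort(9090)                           // assumed port
    .withPath(Path("/api") ++ Path("/users")) // context root ++ relative path

  // Raw query strings are attached separately, as in asUriWith above.
  val full: Uri = base.withRawQueryString("name=John%20Doe")

  println(full) // http://localhost:9090/api/users?name=John%20Doe
}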
Example 35
Source File: PathBuilderTestSupport.scala    From wix-http-testkit   with MIT License 5 votes vote down vote up
package com.wix.e2e.http.client.drivers

import akka.http.scaladsl.model.Uri
import com.wix.e2e.http.BaseUri
import com.wix.test.random.{randomInt, randomStr}
import org.specs2.matcher.Matcher
import org.specs2.matcher.Matchers._


trait PathBuilderTestSupport {
  val contextRoot = s"/$randomStr"
  val contextRootWithMultiplePaths = s"/$randomStr/$randomStr/$randomStr"
  val relativePath = s"/$randomStr"
  val relativePathWithMultipleParts = s"/$randomStr/$randomStr/$randomStr"
  val baseUri = BaseUriGen.random
  val escapedCharacters = "!'();:@+$,/?%#[]\"'/\\" //&=
}

object BaseUriGen {
  def random: BaseUri = BaseUri(randomStr.toLowerCase, randomInt(1, 65536), Some(s"/$randomStr"))
}

object UrlBuilderMatchers {
  def beUrl(url: String): Matcher[Uri] = be_===(url) ^^ { (_: Uri).toString }
} 
Example 36
Source File: BitcoinerLiveFeeRateProvider.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.feeprovider

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import org.bitcoins.commons.jsonmodels.wallet.BitcoinerLiveResult
import org.bitcoins.commons.serializers.JsonSerializers._
import org.bitcoins.core.wallet.fee.SatoshisPerVirtualByte
import play.api.libs.json.{JsError, JsSuccess, Json}

import scala.util.{Failure, Success, Try}

case class BitcoinerLiveFeeRateProvider(minutes: Int)(implicit
    override val system: ActorSystem)
    extends CachedHttpFeeRateProvider {

  private val bitcoinerLiveValidMinutes =
    Vector(30, 60, 120, 180, 360, 720, 1440)
  require(
    bitcoinerLiveValidMinutes.contains(minutes),
    s"$minutes is not a valid selection, must be from $bitcoinerLiveValidMinutes")

  override val uri: Uri =
    Uri("https://bitcoiner.live/api/fees/estimates/latest")

  override def converter(str: String): Try[SatoshisPerVirtualByte] = {
    val json = Json.parse(str)
    json.validate[BitcoinerLiveResult] match {
      case JsSuccess(response, _) =>
        Success(response.estimates(minutes).sat_per_vbyte)
      case JsError(error) =>
        Failure(
          new RuntimeException(
            s"Unexpected error when parsing response: $error"))
    }
  }
} 
Example 37
Source File: HttpFeeRateProvider.scala    From bitcoin-s   with MIT License 5 votes vote down vote up
package org.bitcoins.feeprovider

import java.time.{Duration, Instant}

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.util.ByteString
import org.bitcoins.core.api.FeeRateApi
import org.bitcoins.core.util.TimeUtil
import org.bitcoins.core.wallet.fee.FeeUnit

import scala.concurrent.{ExecutionContextExecutor, Future}
import scala.util.Try

object HttpFeeRateProvider {

  def makeApiCall(uri: Uri)(implicit system: ActorSystem): Future[String] = {
    implicit val ec: ExecutionContextExecutor = system.dispatcher
    Http()
      .singleRequest(HttpRequest(uri = uri))
      .flatMap(response =>
        response.entity.dataBytes
          .runFold(ByteString.empty)(_ ++ _)
          .map(payload => payload.decodeString(ByteString.UTF_8)))
  }
}

abstract class HttpFeeRateProvider extends FeeRateApi {
  implicit protected val system: ActorSystem

  protected def uri: Uri

  protected def converter(str: String): Try[FeeUnit]

  def getFeeRate: Future[FeeUnit] = {
    HttpFeeRateProvider
      .makeApiCall(uri)
      .flatMap(ret => Future.fromTry(converter(ret)))(system.dispatcher)
  }
}

abstract class CachedHttpFeeRateProvider extends HttpFeeRateProvider {

  private var cachedFeeRateOpt: Option[(FeeUnit, Instant)] = None

  val cacheDuration: Duration = Duration.ofMinutes(5)

  private def updateFeeRate(): Future[FeeUnit] = {
    implicit val ec: ExecutionContextExecutor = system.dispatcher
    super.getFeeRate.map { feeRate =>
      cachedFeeRateOpt = Some(feeRate, TimeUtil.now)
      feeRate
    }
  }

  override def getFeeRate: Future[FeeUnit] = {
    cachedFeeRateOpt match {
      case None =>
        updateFeeRate()
      case Some((cachedFeeRate, time)) =>
        val now = TimeUtil.now
        // The cache is valid until time + cacheDuration; refresh only once that moment has passed.
        if (time.plus(cacheDuration).isAfter(now)) {
          Future.successful(cachedFeeRate)
        } else {
          updateFeeRate()
        }
    }
  }
} 
Example 38
Source File: LmgtfyBot.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query
import cats.instances.future._
import cats.syntax.functor._
import com.bot4s.telegram.Implicits._
import com.bot4s.telegram.api.declarative.{Commands, InlineQueries}
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods.ParseMode
import com.bot4s.telegram.models._

import scala.concurrent.Future


class LmgtfyBot(token: String) extends ExampleBot(token)
  with Polling
  with InlineQueries[Future]
  with Commands[Future] {

  def lmgtfyBtn(query: String): InlineKeyboardMarkup = InlineKeyboardMarkup.singleButton(
    InlineKeyboardButton.url("\uD83C\uDDECoogle it now!", lmgtfyUrl(query)))

  onCommand('start | 'help) { implicit msg =>
    reply(
      s"""Generates ${"Let me \uD83C\uDDECoogle that for you!".italic} links.
         |
         |/start | /help - list commands
         |
         |/lmgtfy args - generate link
         |
         |/lmgtfy2 | /btn args - clickable button
         |
         |@Bot args - Inline mode
      """.stripMargin,
      parseMode = ParseMode.Markdown).void
  }

  onCommand('lmgtfy) { implicit msg =>
    withArgs { args =>
      val query = args.mkString(" ")

      replyMd(
        query.altWithUrl(lmgtfyUrl(query)),
        disableWebPagePreview = true
      ).void
    }
  }

  def lmgtfyUrl(query: String): String =
    Uri("http://lmgtfy.com")
      .withQuery(Query("q" -> query))
      .toString()

  onCommand('btn | 'lmgtfy2) { implicit msg =>
    withArgs { args =>
      val query = args.mkString(" ")
      reply(query, replyMarkup = lmgtfyBtn(query)).void
    }
  }

  onInlineQuery { implicit iq =>
    val query = iq.query

    if (query.isEmpty)
      answerInlineQuery(Seq()).void
    else {

      val textMessage = InputTextMessageContent(
        query.altWithUrl(lmgtfyUrl(query)),
        disableWebPagePreview = true,
        parseMode = ParseMode.Markdown)

      val results = List(
        InlineQueryResultArticle(
          "btn:" + query,
          inputMessageContent = textMessage,
          title = iq.query,
          description = "Clickable button + link",
          replyMarkup = lmgtfyBtn(query)
        ),
        InlineQueryResultArticle(
          query,
          inputMessageContent = textMessage,
          description = "Clickable link",
          title = iq.query
        )
      )

      answerInlineQuery(results, cacheTime = 1).void
    }
  }
} 
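The lmgtfyUrl helper above relies on Uri.Query to attach and encode the search term. A small stand-alone sketch of that usage; the exact encoding of the rendered value is whatever akka-http applies:

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query

object LmgtfyUrlSketch extends App {
  // Query(...) takes care of encoding the parameter value when the Uri is rendered.
  val url: String = Uri("http://lmgtfy.com")
    .withQuery(Query("q" -> "akka http uri"))
    .toString()

  println(url) // the query value is encoded by akka-http on rendering
}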
Example 39
Source File: QrCodesBot.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
import java.net.URLEncoder

import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.util.ByteString
import com.bot4s.telegram.api.declarative.Commands
import com.bot4s.telegram.api._
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods._
import com.bot4s.telegram.models.AkkaInputFile

import scala.concurrent.Future


class QrCodesBot(token: String) extends AkkaExampleBot(token)
  with Polling
  with Commands[Future]
  with ChatActions[Future] {

  // Multiple variants
  onCommand('qr | 'qrcode | 'qr_code) { implicit msg =>
    withArgs { args =>
      val url = "https://api.qrserver.com/v1/create-qr-code/?data=" +
        URLEncoder.encode(args mkString " ", "UTF-8")

      for {
        response <- Http().singleRequest(HttpRequest(uri = Uri(url)))
        if response.status.isSuccess()
        bytes <- Unmarshal(response).to[ByteString]
        photo = AkkaInputFile("qrcode.png", bytes)
        _ <- uploadingPhoto // Hint the user
        _ <- request(SendPhoto(msg.source, photo))
      } yield ()
    }
  }
} 
Example 40
Source File: VoiceFileBot.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.util.ByteString
import cats.instances.future._
import cats.syntax.functor._
import com.bot4s.telegram.api.declarative.Commands
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods._

import scala.concurrent.Future
import scala.util.{Failure, Success}


class VoiceFileBot(token: String) extends AkkaExampleBot(token)
  with Polling
  with Commands[Future] {

  onMessage { implicit msg =>

    using(_.voice) { voice =>
      request(GetFile(voice.fileId)).andThen({
        case Success(file) =>
          file.filePath match {

            case Some(filePath) =>
              // See https://core.telegram.org/bots/api#getfile
              val url = s"https://api.telegram.org/file/bot${token}/${filePath}"

              for {
                res <- Http().singleRequest(HttpRequest(uri = Uri(url)))
                if res.status.isSuccess()
                bytes <- Unmarshal(res).to[ByteString]
                _ <- reply(s"File with ${bytes.size} bytes received.")
              } yield ()
            case None =>
              reply("No file_path was returned")
          }

        case Failure(e) =>
          logger.error("Exception: " + e) // poor man's logging
      }).void
    }
  }
} 
Example 41
Source File: GitHubHosted2048Bot.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.{Path, Query}
import akka.http.scaladsl.model.headers.{HttpOrigin, HttpOriginRange}

import ch.megard.akka.http.cors.scaladsl.model.{HttpHeaderRange, HttpOriginMatcher}

import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import cats.instances.future._
import cats.syntax.functor._
import ch.megard.akka.http.cors.scaladsl.CorsDirectives.cors
import ch.megard.akka.http.cors.scaladsl.settings.CorsSettings
import com.bot4s.telegram.api.declarative.{Callbacks, Commands}
import com.bot4s.telegram.api.{GameManager, Payload}
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods.SendGame

import scala.concurrent.Future


class GitHubHosted2048Bot(token: String, gameManagerHost: String)
  extends AkkaExampleBot(token)
    with Polling
    with Commands[Future]
    with Callbacks[Future]
    with GameManager {

  override val port: Int = 8080

  val Play2048 = "play_2048"
  val GitHubPages = Uri("https://mukel.github.io")

  onCommand(Play2048 or "2048" or "start") { implicit msg =>
    request(
      SendGame(msg.source, Play2048)
    ).void
  }

  onCallbackQuery { implicit cbq =>
    val acked = cbq.gameShortName.collect {
      case Play2048 =>
        val payload = Payload.forCallbackQuery(gameManagerHost)

        val url = GitHubPages
          .withPath(Path(s"/$Play2048/index.html"))
          .withQuery(Query("payload" -> payload.base64Encode))

        ackCallback(url = Some(url.toString()))
    }

    acked.getOrElse(ackCallback()).void
  }

  // Enable CORS for GitHub Pages.
  // Allows GitHub Pages to call cross-domain getScores and setScore.
  private val allowGitHub = CorsSettings.defaultSettings
    .withAllowedOrigins(HttpOriginMatcher(HttpOrigin(GitHubPages.toString())))

  override def routes: Route =
    super.routes ~
      cors(allowGitHub) {
        gameManagerRoute
      }
} 
Example 42
Source File: WebhookBot.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
import java.net.URLEncoder

import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import com.bot4s.telegram.api.Webhook
import com.bot4s.telegram.methods._
import com.bot4s.telegram.models.Message

import scala.concurrent.Future


class WebhookBot(token: String) extends AkkaExampleBot(token) with Webhook {

  val port = 8080
  val webhookUrl = "https://88c444ab.ngrok.io"

  val baseUrl = "http://api.mathjs.org/v1/?expr="

  override def receiveMessage(msg: Message): Future[Unit] = {
    msg.text.fold(Future.successful(())) { text =>
      val url = baseUrl + URLEncoder.encode(text, "UTF-8")
      for {
        res <- Http().singleRequest(HttpRequest(uri = Uri(url)))
        if res.status.isSuccess()
        result <- Unmarshal(res).to[String]
        _ <- request(SendMessage(msg.source, result))
      } yield ()
    }
  }
} 
Example 43
Source File: SelfHosted2048Bot.scala    From telegram   with Apache License 2.0 5 votes vote down vote up
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.{Path, Query}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import cats.instances.future._
import cats.syntax.functor._
import com.bot4s.telegram.api.declarative.{Callbacks, Commands}
import com.bot4s.telegram.api.{AkkaDefaults, GameManager, Payload}
import com.bot4s.telegram.future.Polling
import com.bot4s.telegram.methods.SendGame

import scala.concurrent.Future


class SelfHosted2048Bot(token: String, gameManagerHost: String)
  extends ExampleBot(token)
    with Polling
    with AkkaDefaults
    with Callbacks[Future]
    with GameManager
    with Commands[Future] {

  override val port: Int = 8080

  val Play2048 = "play_2048"

  onCommand(Play2048 or "2048" or "start") { implicit msg =>
    request(
      SendGame(msg.source, Play2048)
    ).void
  }

  onCallbackQuery { implicit cbq =>
    val acked = cbq.gameShortName.collect {
      case Play2048 =>
        val payload = Payload.forCallbackQuery(gameManagerHost)

        val url = Uri(gameManagerHost)
          .withPath(Path(s"/$Play2048/index.html"))
          .withQuery(Query("payload" -> payload.base64Encode))

        ackCallback(url = Some(url.toString()))
    }

    acked.getOrElse(ackCallback()).void
  }

  override def routes: Route =
    super.routes ~
      gameManagerRoute ~ {
      pathPrefix(Play2048) {
        getFromResourceDirectory(Play2048)
      }
    }
} 
Example 44
Source File: LoadTest.scala    From ws_to_kafka   with MIT License 5 votes vote down vote up
package com.pkinsky

import java.util.concurrent.atomic.AtomicInteger

import akka.http.scaladsl.model.ws.{InvalidUpgradeResponse, WebsocketUpgradeResponse, WebsocketRequest, TextMessage}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri
import akka.stream.ThrottleMode
import akka.stream.scaladsl.{Keep, Sink, RunnableGraph, Source}
import play.api.libs.json.Json

import scala.concurrent.{Future, Await}
import scala.concurrent.duration._
import scala.language.postfixOps

object LoadTest extends App with AppContext {
  val clients = 256
  val eventsPerClient = 256

  val eventsSent = new AtomicInteger(0)

  def testData(clientId: String): Source[Event, Unit] =
    Source.unfoldInf(1) { n =>
      val event = Event(s"msg number $n", clientId, System.currentTimeMillis())
      (n + 1, event)
    }.take(eventsPerClient).throttle(1, 100 millis, 1, ThrottleMode.Shaping)

  def wsClient(clientId: String): RunnableGraph[Future[WebsocketUpgradeResponse]] =
    testData(clientId).map(e => TextMessage.Strict(Json.toJson(e).toString))
      .map { x => eventsSent.incrementAndGet(); x }
      .viaMat(Http().websocketClientFlow(WebsocketRequest(Uri(s"ws://localhost:$port/ws"))))(Keep.right).to(Sink.ignore)

  //set up websocket connections
  (1 to clients).foreach { id =>
    wsClient(s"client $id").run()
  }

  //watch kafka for messages sent via websocket
  val kafkaConsumerGraph: RunnableGraph[Future[Seq[Event]]] =
    kafka.consume[Event](eventTopic, "group_new")
      .take(clients * eventsPerClient).takeWithin(2 minutes)
      .toMat(Sink.seq)(Keep.right)

  val res = Await.result(kafkaConsumerGraph.run, 5 minutes)
  println(s"sent ${eventsSent.get()} events total")
  println(s"res size: ${res.length}")
} 
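The load test above targets a ws:// endpoint built with Uri. A brief sketch showing how the individual parts of such a Uri can be read back (the endpoint address is made up):

import akka.http.scaladsl.model.Uri

object WsUriSketch extends App {
  val uri = Uri("ws://localhost:9000/ws") // assumed endpoint
  println(uri.scheme)                 // ws
  println(uri.authority.host.address) // localhost
  println(uri.effectivePort)          // 9000
  println(uri.path)                   // /ws
}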
Example 45
Source File: ClusterBootstrap.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster.bootstrap

import java.util.concurrent.atomic.AtomicReference

import akka.AkkaVersion
import scala.concurrent.{ Future, Promise, TimeoutException }
import scala.concurrent.duration._

import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import akka.actor.ExtendedActorSystem
import akka.actor.Extension
import akka.actor.ExtensionId
import akka.actor.ExtensionIdProvider
import akka.annotation.InternalApi
import akka.cluster.Cluster
import akka.discovery.{ Discovery, ServiceDiscovery }
import akka.event.Logging
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Route
import akka.management.cluster.bootstrap.contactpoint.HttpClusterBootstrapRoutes
import akka.management.cluster.bootstrap.internal.BootstrapCoordinator
import akka.management.scaladsl.ManagementRouteProviderSettings
import akka.management.scaladsl.ManagementRouteProvider

final class ClusterBootstrap(implicit system: ExtendedActorSystem) extends Extension with ManagementRouteProvider {

  import ClusterBootstrap.Internal._
  import system.dispatcher

  private val log = Logging(system, classOf[ClusterBootstrap])

  private final val bootstrapStep = new AtomicReference[BootstrapStep](NotRunning)

  AkkaVersion.require("cluster-bootstrap", "2.5.27")

  val settings: ClusterBootstrapSettings = ClusterBootstrapSettings(system.settings.config, log)

  // used for initial discovery of contact points
  lazy val discovery: ServiceDiscovery =
    settings.contactPointDiscovery.discoveryMethod match {
      case "akka.discovery" =>
        val discovery = Discovery(system).discovery
        log.info("Bootstrap using default `akka.discovery` method: {}", Logging.simpleName(discovery))
        discovery

      case otherDiscoveryMechanism =>
        log.info("Bootstrap using `akka.discovery` method: {}", otherDiscoveryMechanism)
        Discovery(system).loadServiceDiscovery(otherDiscoveryMechanism)
    }

  private val joinDecider: JoinDecider = {
    system.dynamicAccess
      .createInstanceFor[JoinDecider](
        settings.joinDecider.implClass,
        List((classOf[ActorSystem], system), (classOf[ClusterBootstrapSettings], settings))
      )
      .get
  }

  private[this] val _selfContactPointUri: Promise[Uri] = Promise()

  override def routes(routeProviderSettings: ManagementRouteProviderSettings): Route = {
    log.info(s"Using self contact point address: ${routeProviderSettings.selfBaseUri}")
    this.setSelfContactPoint(routeProviderSettings.selfBaseUri)

    new HttpClusterBootstrapRoutes(settings).routes
  }

  def start(): Unit =
    if (Cluster(system).settings.SeedNodes.nonEmpty) {
      log.warning(
        "Application is configured with specific `akka.cluster.seed-nodes`: {}, bailing out of the bootstrap process! " +
        "If you want to use the automatic bootstrap mechanism, make sure to NOT set explicit seed nodes in the configuration. " +
        "This node will attempt to join the configured seed nodes.",
        Cluster(system).settings.SeedNodes.mkString("[", ", ", "]")
      )
    } else if (bootstrapStep.compareAndSet(NotRunning, Initializing)) {
      log.info("Initiating bootstrap procedure using {} method...", settings.contactPointDiscovery.discoveryMethod)

      ensureSelfContactPoint()
      val bootstrapProps = BootstrapCoordinator.props(discovery, joinDecider, settings)
      val bootstrap = system.systemActorOf(bootstrapProps, "bootstrapCoordinator")
      // Bootstrap already logs in several other execution points when it can't form a cluster, and why.
      selfContactPoint.foreach { uri =>
        bootstrap ! BootstrapCoordinator.Protocol.InitiateBootstrapping(uri)
      }
    } else log.warning("Bootstrap already initiated, yet start() method was called again. Ignoring.")

  
  private[bootstrap] object Internal {
    sealed trait BootstrapStep
    case object NotRunning extends BootstrapStep
    case object Initializing extends BootstrapStep
  }

} 
Example 46
Source File: HttpClusterBootstrapRoutes.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster.bootstrap.contactpoint

import scala.concurrent.duration._

import akka.actor.ActorSystem
import akka.cluster.Cluster
import akka.cluster.Member
import akka.event.Logging
import akka.event.LoggingAdapter
import akka.http.javadsl.server.directives.RouteAdapter
import akka.http.scaladsl.model.HttpRequest
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Route
import akka.management.cluster.bootstrap.ClusterBootstrapSettings
import akka.management.cluster.bootstrap.contactpoint.HttpBootstrapJsonProtocol.ClusterMember
import akka.management.cluster.bootstrap.contactpoint.HttpBootstrapJsonProtocol.SeedNodes

final class HttpClusterBootstrapRoutes(settings: ClusterBootstrapSettings) extends HttpBootstrapJsonProtocol {

  import akka.http.scaladsl.server.Directives._

  private def routeGetSeedNodes: Route = extractClientIP { clientIp =>
    extractActorSystem { implicit system =>
      import akka.cluster.MemberStatus
      val cluster = Cluster(system)

      def memberToClusterMember(m: Member): ClusterMember =
        ClusterMember(m.uniqueAddress.address, m.uniqueAddress.longUid, m.status.toString, m.roles)

      val state = cluster.state

      // TODO shuffle the members so in a big deployment nodes start joining different ones and not all the same?
      val members = state.members
        .diff(state.unreachable)
        .filter(m =>
          m.status == MemberStatus.up || m.status == MemberStatus.weaklyUp || m.status == MemberStatus.joining)
        .take(settings.contactPoint.httpMaxSeedNodesToExpose)
        .map(memberToClusterMember)

      val info = SeedNodes(cluster.selfMember.uniqueAddress.address, members)
      log.info(
        "Bootstrap request from {}: Contact Point returning {} seed-nodes [{}]",
        clientIp,
        members.size,
        members.map(_.node).mkString(", "))
      complete(info)
    }
  }

  
  def getRoutes: akka.http.javadsl.server.Route = RouteAdapter(routes)

  private def log(implicit sys: ActorSystem): LoggingAdapter =
    Logging(sys, classOf[HttpClusterBootstrapRoutes])

}

object ClusterBootstrapRequests {

  import akka.http.scaladsl.client.RequestBuilding._

  def bootstrapSeedNodes(baseUri: Uri): HttpRequest =
    Get(baseUri + "/bootstrap/seed-nodes")

} 
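bootstrapSeedNodes above appends the endpoint path by string concatenation (Uri + String yields a plain String, which Get also accepts). An alternative sketch that stays within the Uri model, using a made-up contact point address:

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path

object SeedNodesUriSketch extends App {
  val baseUri = Uri("http://10.0.0.1:8558") // assumed contact point
  // Append the path segments on the Uri itself instead of concatenating strings.
  val seedNodesUri = baseUri.withPath(baseUri.path ++ Path("/bootstrap/seed-nodes"))
  println(seedNodesUri) // http://10.0.0.1:8558/bootstrap/seed-nodes
}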
Example 47
Source File: ManagementRouteProviderSettings.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.scaladsl

import java.util.Optional
import java.util.concurrent.CompletionStage
import java.util.function.{ Function => JFunction }

import akka.annotation.DoNotInherit
import akka.annotation.InternalApi
import akka.http.javadsl.server.directives.SecurityDirectives.ProvidedCredentials
import akka.http.scaladsl.HttpsConnectionContext
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Directives.AsyncAuthenticator
import akka.management.javadsl

object ManagementRouteProviderSettings {
  def apply(selfBaseUri: Uri, readOnly: Boolean): ManagementRouteProviderSettings = {
    ManagementRouteProviderSettingsImpl(selfBaseUri, None, None, None, readOnly = readOnly)
  }
}


@InternalApi private[akka] final case class ManagementRouteProviderSettingsImpl(
    override val selfBaseUri: Uri,
    scaladslAuth: Option[AsyncAuthenticator[String]],
    javadslAuth: Option[JFunction[Optional[ProvidedCredentials], CompletionStage[Optional[String]]]],
    override val httpsConnectionContext: Option[HttpsConnectionContext],
    override val readOnly: Boolean
) extends ManagementRouteProviderSettings {

  // There is no public API for defining both so it should not be possible
  require(!(javadslAuth.isDefined && scaladslAuth.isDefined), "Defining both javadsl and scaladsl auth is not allowed")

  override def withAuth(newAuth: AsyncAuthenticator[String]): ManagementRouteProviderSettings =
    copy(scaladslAuth = Option(newAuth))

  override def withHttpsConnectionContext(
      newHttpsConnectionContext: HttpsConnectionContext): ManagementRouteProviderSettings =
    copy(selfBaseUri = selfBaseUri.withScheme("https"), httpsConnectionContext = Option(newHttpsConnectionContext))

  def javadslHttpsConnectionContext: Optional[akka.http.javadsl.HttpsConnectionContext] =
    httpsConnectionContext match {
      case None      => Optional.empty()
      case Some(ctx) => Optional.of(ctx) // a scaladsl.HttpsConnectionContext is a javadsl.HttpsConnectionContext
    }

  override def withReadOnly(readOnly: Boolean): ManagementRouteProviderSettings = copy(readOnly = readOnly)

  def asJava: javadsl.ManagementRouteProviderSettingsImpl =
    javadsl.ManagementRouteProviderSettingsImpl(
      selfBaseUri = akka.http.javadsl.model.Uri.create(selfBaseUri),
      javadslAuth,
      scaladslAuth,
      javadslHttpsConnectionContext,
      readOnly)

} 
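withHttpsConnectionContext above switches the self base URI to https via Uri.withScheme, which leaves host, port and path untouched. A minimal sketch with an assumed management address:

import akka.http.scaladsl.model.Uri

object SchemeSwitchSketch extends App {
  val selfBaseUri = Uri("http://127.0.0.1:8558") // assumed management address
  val httpsUri    = selfBaseUri.withScheme("https")
  println(httpsUri) // https://127.0.0.1:8558
}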
Example 48
Source File: HealthCheckRoutesSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management

import akka.actor.ExtendedActorSystem
import akka.http.scaladsl.model.{ StatusCodes, Uri }
import akka.http.scaladsl.server._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.scaladsl.{ HealthChecks, ManagementRouteProviderSettings }
import org.scalatest.{ Matchers, WordSpec }

import scala.concurrent.Future

class HealthCheckRoutesSpec extends WordSpec with Matchers with ScalatestRouteTest {

  private val eas = system.asInstanceOf[ExtendedActorSystem]

  private def testRoute(
      readyResultValue: Future[Either[String, Unit]] = Future.successful(Right(())),
      aliveResultValue: Future[Either[String, Unit]] = Future.successful(Right(()))
  ): Route = {
    new HealthCheckRoutes(eas) {
      override protected val healthChecks: HealthChecks = new HealthChecks {
        override def readyResult(): Future[Either[String, Unit]] = readyResultValue
        override def ready(): Future[Boolean] = readyResultValue.map(_.isRight)
        override def aliveResult(): Future[Either[String, Unit]] = aliveResultValue
        override def alive(): Future[Boolean] = aliveResultValue.map(_.isRight)
      }
    }.routes(ManagementRouteProviderSettings(Uri("http://whocares"), readOnly = false))
  }

  tests("/ready", result => testRoute(readyResultValue = result))
  tests("/alive", result => testRoute(aliveResultValue = result))

  def tests(endpoint: String, route: Future[Either[String, Unit]] => Route) = {
    s"Health check ${endpoint} endpoint" should {
      "return 200 for Right" in {
        Get(endpoint) ~> route(Future.successful(Right(()))) ~> check {
          status shouldEqual StatusCodes.OK
        }
      }
      "return 500 for Left" in {
        Get(endpoint) ~> route(Future.successful(Left("com.someclass.MyCheck"))) ~> check {
          status shouldEqual StatusCodes.InternalServerError
          responseAs[String] shouldEqual "Not Healthy: com.someclass.MyCheck"
        }
      }
      "return 500 for fail" in {
        Get(endpoint) ~> route(Future.failed(new RuntimeException("darn it"))) ~> check {
          status shouldEqual StatusCodes.InternalServerError
          responseAs[String] shouldEqual "Health Check Failed: darn it"
        }
      }
    }
  }
} 
Example 49
Source File: LogLevelRoutesSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.loglevels.logback

import akka.actor.ExtendedActorSystem
import akka.http.javadsl.server.MalformedQueryParamRejection
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.scaladsl.ManagementRouteProviderSettings
import org.scalatest.Matchers
import org.scalatest.WordSpec
import org.slf4j.LoggerFactory
import akka.event.{ Logging => ClassicLogging }

class LogLevelRoutesSpec extends WordSpec with Matchers with ScalatestRouteTest {

  override def testConfigSource: String =
    """
      akka.loglevel = INFO
      """

  val routes = LogLevelRoutes
    .createExtension(system.asInstanceOf[ExtendedActorSystem])
    .routes(ManagementRouteProviderSettings(Uri("https://example.com"), readOnly = false))

  "The logback log level routes" must {

    "show log level of a Logger" in {
      Get("/loglevel/logback?logger=LogLevelRoutesSpec") ~> routes ~> check {
        responseAs[String]
      }
    }

    "change log level of a Logger" in {
      Put("/loglevel/logback?logger=LogLevelRoutesSpec&level=DEBUG") ~> routes ~> check {
        response.status should ===(StatusCodes.OK)
        LoggerFactory.getLogger("LogLevelRoutesSpec").isDebugEnabled should ===(true)
      }
    }

    "fail for unknown log level" in {
      Put("/loglevel/logback?logger=LogLevelRoutesSpec&level=MONKEY") ~> routes ~> check {
        rejection shouldBe an[MalformedQueryParamRejection]
      }
    }

    "not change loglevel if read only" in {
      val readOnlyRoutes = LogLevelRoutes
        .createExtension(system.asInstanceOf[ExtendedActorSystem])
        .routes(ManagementRouteProviderSettings(Uri("https://example.com"), readOnly = true))
      Put("/loglevel/logback?logger=LogLevelRoutesSpec&level=DEBUG") ~> readOnlyRoutes ~> check {
        response.status should ===(StatusCodes.Forbidden)
      }
    }

    "allow inspecting classic Akka loglevel" in {
      Get("/loglevel/akka") ~> routes ~> check {
        response.status should ===(StatusCodes.OK)
        responseAs[String] should ===("INFO")
      }
    }

    "allow changing classic Akka loglevel" in {
      Put("/loglevel/akka?level=DEBUG") ~> routes ~> check {
        response.status should ===(StatusCodes.OK)
        system.eventStream.logLevel should ===(ClassicLogging.DebugLevel)
      }
    }
  }

} 
Example 50
Source File: ClusterHttpManagementRouteProviderSpec.scala    From akka-management   with Apache License 2.0 5 votes vote down vote up
package akka.management.cluster

import akka.actor.ExtendedActorSystem
import akka.cluster.Cluster
import akka.http.scaladsl.model.{ StatusCodes, Uri }
import akka.http.scaladsl.testkit.ScalatestRouteTest
import akka.management.scaladsl.ManagementRouteProviderSettings
import org.scalatest.{ Matchers, WordSpec }

object ClusterHttpManagementRouteProviderSpec {}

class ClusterHttpManagementRouteProviderSpec extends WordSpec with ScalatestRouteTest with Matchers {

  val cluster = Cluster(system)

  "Cluster HTTP Management Route" should {
    val routes = ClusterHttpManagementRouteProvider(
      system.asInstanceOf[ExtendedActorSystem]
    )
    "not expose write operations when readOnly set" in {
      val readOnlyRoutes = routes.routes(
        ManagementRouteProviderSettings(
          Uri("http://localhost"),
          readOnly = true
        )
      )
      Get("/cluster/members") ~> readOnlyRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.OK
      }
      Post("/cluster/members") ~> readOnlyRoutes ~> check {
        status shouldEqual StatusCodes.MethodNotAllowed
      }
      Get("/cluster/members/member1") ~> readOnlyRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
      Delete("/cluster/members/member1") ~> readOnlyRoutes ~> check {
        status shouldEqual StatusCodes.MethodNotAllowed
      }
      Put("/cluster/members/member1") ~> readOnlyRoutes ~> check {
        status shouldEqual StatusCodes.MethodNotAllowed
      }
    }

    "expose write when readOnly false" in {
      val allRoutes = routes.routes(
        ManagementRouteProviderSettings(
          Uri("http://localhost"),
          readOnly = false
        )
      )
      Get("/cluster/members") ~> allRoutes ~> check {
        handled shouldEqual true
      }
      Get("/cluster/members/member1") ~> allRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
      Delete("/cluster/members/member1") ~> allRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
      Put("/cluster/members/member1") ~> allRoutes ~> check {
        handled shouldEqual true
        status shouldEqual StatusCodes.NotFound
      }
    }
  }

} 
Example 51
Source File: AkkaQueryBuilder.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.shared.handlers

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.core.components.QueryBuilder
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials


private[akka] class AkkaQueryBuilder(
    schema: String,
    host: String,
    port: Int,
    credentials: Option[InfluxCredentials])
  extends QueryBuilder[Uri](credentials) {

  override def buildQuery(url: String): Uri =
    Uri.from(
      schema,
      host = host,
      port = port,
      path = url
    )

  override def buildQuery(url: String, queryParams: List[(String, String)]): Uri =
    buildQuery(url).withQuery(Uri.Query(queryParams: _*))
} 
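buildQuery above combines Uri.from with Uri.Query to produce the final request Uri. A small sketch of that combination, using made-up InfluxDB-style parameters:

import akka.http.scaladsl.model.Uri

object BuildQuerySketch extends App {
  val params = List("db" -> "mydb", "q" -> "SHOW DATABASES") // assumed parameters
  val uri: Uri = Uri
    .from(scheme = "http", host = "localhost", port = 8086, path = "/query")
    .withQuery(Uri.Query(params: _*))

  println(uri) // rendered with db and q attached as query parameters
}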
Example 52
Source File: AkkaManagementClient.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.management

import _root_.akka.actor.ActorSystem
import _root_.akka.http.scaladsl.HttpsConnectionContext
import akka.http.scaladsl.model.{HttpResponse, RequestEntity, Uri}
import akka.stream.ActorMaterializer
import com.github.fsanaulla.chronicler.akka.shared.InfluxAkkaClient
import com.github.fsanaulla.chronicler.akka.shared.handlers._
import com.github.fsanaulla.chronicler.core.ManagementClient
import com.github.fsanaulla.chronicler.core.alias.ErrorOr
import com.github.fsanaulla.chronicler.core.model._

import scala.concurrent.{ExecutionContext, Future}

final class AkkaManagementClient(
    host: String,
    port: Int,
    credentials: Option[InfluxCredentials],
    httpsContext: Option[HttpsConnectionContext],
    terminateActorSystem: Boolean
  )(implicit val ex: ExecutionContext,
    val system: ActorSystem,
    val F: Functor[Future],
    val FK: FunctionK[Future, Future])
  extends InfluxAkkaClient(terminateActorSystem, httpsContext)
  with ManagementClient[Future, Future, HttpResponse, Uri, RequestEntity] {

  implicit val mat: ActorMaterializer  = ActorMaterializer()
  implicit val qb: AkkaQueryBuilder    = new AkkaQueryBuilder(schema, host, port, credentials)
  implicit val jh: AkkaJsonHandler     = new AkkaJsonHandler(new AkkaBodyUnmarshaller(false))
  implicit val re: AkkaRequestExecutor = new AkkaRequestExecutor(ctx)
  implicit val rh: AkkaResponseHandler = new AkkaResponseHandler(jh)

  override def ping: Future[ErrorOr[InfluxDBInfo]] = {
    re.get(qb.buildQuery("/ping"), compressed = false)
      .flatMap(rh.pingResult)
  }
} 
Example 53
Source File: SubscriptionsManagementQuerySpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.query

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.akka.shared.handlers.AkkaQueryBuilder
import com.github.fsanaulla.chronicler.core.enums.Destinations
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials
import com.github.fsanaulla.chronicler.core.query.SubscriptionsManagementQuery
import org.scalatest.{FlatSpec, Matchers}


class SubscriptionsManagementQuerySpec
  extends FlatSpec
  with Matchers
  with SubscriptionsManagementQuery[Uri] {

  trait AuthEnv {
    val credentials                   = Some(InfluxCredentials("admin", "admin"))
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, credentials)
  }

  trait NonAuthEnv {
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, None)
  }

  val subName                         = "subs"
  val dbName                          = "db"
  val rpName                          = "rp"
  val destType: Destinations.ANY.type = Destinations.ANY
  val hosts: Seq[String]              = Seq("host1", "host2")
  val resHosts: String                = Seq("host1", "host2").map(str => s"'$str'").mkString(", ")

  val createRes =
    s"CREATE SUBSCRIPTION $subName ON $dbName.$rpName DESTINATIONS $destType $resHosts"

  "SubscriptionsManagementQuery" should "create subs query" in new AuthEnv {
    createSubscriptionQuery(subName, dbName, rpName, destType, hosts) shouldEqual
      queryTesterAuth(createRes)(credentials.get)
  }

  it should "create subs query without auth" in new NonAuthEnv {
    createSubscriptionQuery(subName, dbName, rpName, destType, hosts) shouldEqual queryTester(
      createRes
    )
  }

  val dropRes = s"DROP SUBSCRIPTION $subName ON $dbName.$rpName"

  it should "drop subs query" in new AuthEnv {
    dropSubscriptionQuery(subName, dbName, rpName) shouldEqual
      queryTesterAuth(dropRes)(credentials.get)
  }

  it should "drop subs query without auth" in new NonAuthEnv {
    dropSubscriptionQuery(subName, dbName, rpName) shouldEqual queryTester(dropRes)
  }

  val showRes = "SHOW SUBSCRIPTIONS"

  it should "show subs query" in new AuthEnv {
    showSubscriptionsQuery shouldEqual
      queryTesterAuth(showRes)(credentials.get)
  }

  it should "show subs query without auth" in new NonAuthEnv {
    showSubscriptionsQuery shouldEqual queryTester(showRes)
  }
} 
Example 54
Source File: RetentionPolicyManagementQuerySpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.query

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.akka.shared.handlers.AkkaQueryBuilder
import com.github.fsanaulla.chronicler.core.duration._
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials
import com.github.fsanaulla.chronicler.core.query.RetentionPolicyManagementQuery
import org.scalatest.{FlatSpec, Matchers}

import scala.language.postfixOps


class RetentionPolicyManagementQuerySpec
  extends FlatSpec
  with Matchers
  with RetentionPolicyManagementQuery[Uri] {

  trait AuthEnv {
    val credentials                   = Some(InfluxCredentials("admin", "admin"))
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, credentials)
  }

  trait NonAuthEnv {
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, None)
  }

  val testRPName = "testRP"
  val testDBName = "testDB"

  "RetentionPolicyManagement" should "create retention policy" in new AuthEnv {
    createRPQuery(testRPName, testDBName, 4 hours, 3, Some(4 hours), default = true) shouldEqual
      queryTesterAuth(
        s"CREATE RETENTION POLICY $testRPName ON $testDBName DURATION 4h REPLICATION 3 SHARD DURATION 4h DEFAULT"
      )(credentials.get)

    createRPQuery(testRPName, testDBName, 4 hours, 3, None, default = true) shouldEqual
      queryTesterAuth(
        s"CREATE RETENTION POLICY $testRPName ON $testDBName DURATION 4h REPLICATION 3 DEFAULT"
      )(credentials.get)

    createRPQuery(testRPName, testDBName, 4 hours, 3, Some(4 hours)) shouldEqual
      queryTesterAuth(
        s"CREATE RETENTION POLICY $testRPName ON $testDBName DURATION 4h REPLICATION 3 SHARD DURATION 4h"
      )(credentials.get)
  }

  it should "create retention policy without auth" in new NonAuthEnv {
    createRPQuery(testRPName, testDBName, 4 hours, 3, None) shouldEqual
      queryTester(s"CREATE RETENTION POLICY $testRPName ON $testDBName DURATION 4h REPLICATION 3")
  }

  it should "drop retention policy" in new AuthEnv {
    dropRPQuery(testRPName, testDBName) shouldEqual
      queryTesterAuth(s"DROP RETENTION POLICY $testRPName ON $testDBName")(credentials.get)
  }

  it should "drop retention policy without auth" in new NonAuthEnv {
    dropRPQuery(testRPName, testDBName) shouldEqual
      queryTester(s"DROP RETENTION POLICY $testRPName ON $testDBName")
  }

  it should "update retention policy" in new AuthEnv {
    updateRPQuery(testRPName, testDBName, Some(4 hours), Some(3), Some(4 hours), default = true) shouldEqual
      queryTesterAuth(
        s"ALTER RETENTION POLICY $testRPName ON $testDBName DURATION 4h REPLICATION 3 SHARD DURATION 4h DEFAULT"
      )(credentials.get)

    updateRPQuery(testRPName, testDBName, Some(4 hours), Some(3), None) shouldEqual
      queryTesterAuth(
        s"ALTER RETENTION POLICY $testRPName ON $testDBName DURATION 4h REPLICATION 3"
      )(credentials.get)

  }

  it should "update retention policy without auth" in new NonAuthEnv {
    updateRPQuery(testRPName, testDBName, Some(4 hours), None, None) shouldEqual
      queryTester(s"ALTER RETENTION POLICY $testRPName ON $testDBName DURATION 4h")
    updateRPQuery(testRPName, testDBName, None, Some(3), Some(4 hours)) shouldEqual
      queryTester(
        s"ALTER RETENTION POLICY $testRPName ON $testDBName REPLICATION 3 SHARD DURATION 4h"
      )
  }
} 
Example 55
Source File: ShardManagementQuerySpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.query

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.akka.shared.handlers.AkkaQueryBuilder
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials
import com.github.fsanaulla.chronicler.core.query.ShardManagementQuery
import org.scalatest.{FlatSpec, Matchers}


class ShardManagementQuerySpec extends FlatSpec with Matchers with ShardManagementQuery[Uri] {

  trait AuthEnv {
    val credentials                   = Some(InfluxCredentials("admin", "admin"))
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, credentials)
  }

  trait NonAuthEnv {
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, None)
  }

  "ShardManagementQuery" should "drop shard by id" in new AuthEnv {
    dropShardQuery(5) shouldEqual queryTesterAuth("DROP SHARD 5")(credentials.get)
  }

  it should "drop shard by id without auth" in new NonAuthEnv {
    dropShardQuery(5) shouldEqual queryTester("DROP SHARD 5")
  }

  it should "show shards" in new AuthEnv {
    showShardsQuery shouldEqual queryTesterAuth("SHOW SHARDS")(credentials.get)
  }

  it should "show shards without auth" in new NonAuthEnv {
    showShardsQuery shouldEqual queryTester("SHOW SHARDS")
  }

  it should "show shard groups" in new AuthEnv {
    showShardGroupsQuery shouldEqual queryTesterAuth("SHOW SHARD GROUPS")(
      credentials.get
    )
  }

  it should "show shard groups without auth" in new NonAuthEnv {
    showShardGroupsQuery shouldEqual queryTester("SHOW SHARD GROUPS")
  }
} 
Example 56
Source File: ContinuousQueriesSpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.query

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.akka.shared.handlers.AkkaQueryBuilder
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials
import com.github.fsanaulla.chronicler.core.query.ContinuousQueries
import org.scalatest.{FlatSpec, Matchers}


class ContinuousQueriesSpec extends FlatSpec with Matchers with ContinuousQueries[Uri] {

  trait AuthEnv {
    val credentials                   = Some(InfluxCredentials("admin", "admin"))
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, credentials)
  }

  trait NonAuthEnv {
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, None)
  }

  val db    = "mydb"
  val cq    = "bee_cq"
  val query = "SELECT mean(bees) AS mean_bees INTO aggregate_bees FROM farm GROUP BY time(30m)"

  "ContinuousQuerys operation" should "generate correct show query" in new AuthEnv {
    showCQQuery shouldEqual queryTesterAuth("SHOW CONTINUOUS QUERIES")(credentials.get)
  }

  it should "generate correct drop query" in new AuthEnv {
    dropCQQuery(db, cq) shouldEqual queryTesterAuth(s"DROP CONTINUOUS QUERY $cq ON $db")(
      credentials.get
    )
  }

  it should "generate correct create query" in new AuthEnv {
    createCQQuery(db, cq, query) shouldEqual queryTesterAuth(
      s"CREATE CONTINUOUS QUERY $cq ON $db BEGIN $query END"
    )(credentials.get)
  }

  it should "generate correct show query without auth" in new NonAuthEnv {
    showCQQuery shouldEqual queryTester("SHOW CONTINUOUS QUERIES")
  }

  it should "generate correct drop query without auth" in new NonAuthEnv {
    dropCQQuery(db, cq) shouldEqual queryTester(s"DROP CONTINUOUS QUERY $cq ON $db")
  }

  it should "generate correct create query without auth" in new NonAuthEnv {
    createCQQuery(db, cq, query) shouldEqual queryTester(
      s"CREATE CONTINUOUS QUERY $cq ON $db BEGIN $query END"
    )
  }
} 
Example 57
Source File: QueriesManagementQuerySpec.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka.query

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.akka.shared.handlers.AkkaQueryBuilder
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials
import com.github.fsanaulla.chronicler.core.query.QueriesManagementQuery
import org.scalatest.{FlatSpec, Matchers}


class QueriesManagementQuerySpec extends FlatSpec with Matchers with QueriesManagementQuery[Uri] {

  trait AuthEnv {
    val credentials                   = Some(InfluxCredentials("admin", "admin"))
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, credentials)
  }

  trait NonAuthEnv {
    implicit val qb: AkkaQueryBuilder = new AkkaQueryBuilder("http", "localhost", 8086, None)
  }

  "QueryManagement" should "show query" in new AuthEnv {
    showQuerysQuery shouldEqual queryTesterAuth("SHOW QUERIES")(credentials.get)
  }

  it should "kill query" in new AuthEnv {
    killQueryQuery(5) shouldEqual queryTesterAuth("KILL QUERY 5")(credentials.get)
  }

  it should "show query without auth" in new NonAuthEnv {
    showQuerysQuery shouldEqual queryTester("SHOW QUERIES")
  }

  it should "kill query without auth" in new NonAuthEnv {
    killQueryQuery(5) shouldEqual queryTester("KILL QUERY 5")
  }
} 
Example 58
Source File: package.scala    From chronicler   with Apache License 2.0 5 votes vote down vote up
package com.github.fsanaulla.chronicler.akka

import akka.http.scaladsl.model.Uri
import com.github.fsanaulla.chronicler.core.model.InfluxCredentials

package object query {

  def urlBase(path: String): Uri =
    Uri.from(
      scheme = "http",
      host = "localhost",
      port = 8086,
      path = path
    )

  def queryTesterAuth(query: String)(credentials: InfluxCredentials): Uri =
    urlBase("/query").withQuery(
      Uri.Query("u" -> credentials.username, "p" -> credentials.password, "q" -> query)
    )

  def queryTesterAuth(db: String, query: String)(credentials: InfluxCredentials): Uri =
    urlBase("/query").withQuery(
      Uri.Query("db" -> db, "u" -> credentials.username, "p" -> credentials.password, "q" -> query)
    )

  def queryTester(query: String): Uri = urlBase("/query").withQuery(Uri.Query("q" -> query))

  def queryTester(db: String, query: String): Uri =
    urlBase("/query").withQuery(Uri.Query("db" -> db, "q" -> query))

  def writeTester(mp: Map[String, String]): Uri = urlBase("/write").withQuery(Uri.Query(mp))

  def queryTesterSimple(query: Map[String, String]): Uri =
    urlBase("/query").withQuery(Uri.Query(query))
} 
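These helpers only assemble the URIs the query specs above compare against. A stand-alone sketch of the same technique (not part of chronicler; the values are made up), using nothing beyond akka-http's Uri and Uri.Query:

import akka.http.scaladsl.model.Uri

object ExpectedUriSketch extends App {
  // Base endpoint, built the same way as urlBase above.
  val base: Uri = Uri.from(scheme = "http", host = "localhost", port = 8086, path = "/query")

  // Unauthenticated: only the raw query text travels as the "q" parameter.
  val plain: Uri = base.withQuery(Uri.Query("q" -> "SHOW DATABASES"))

  // Authenticated: credentials are attached as the "u" and "p" parameters.
  val withAuth: Uri = base.withQuery(Uri.Query("u" -> "admin", "p" -> "admin", "q" -> "SHOW DATABASES"))

  println(plain)    // special characters in the parameters are escaped when the Uri is rendered
  println(withAuth)
}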
Example 59
Source File: AkkaMeasurementApi.scala    From chronicler   with Apache License 2.0
package com.github.fsanaulla.chronicler.akka.io

import akka.http.scaladsl.model.{HttpResponse, RequestEntity, Uri}
import akka.stream.scaladsl.Source
import com.github.fsanaulla.chronicler.akka.shared.handlers.{
  AkkaQueryBuilder,
  AkkaRequestExecutor,
  AkkaResponseHandler
}
import com.github.fsanaulla.chronicler.core.alias.ErrorOr
import com.github.fsanaulla.chronicler.core.api.MeasurementApi
import com.github.fsanaulla.chronicler.core.components.BodyBuilder
import com.github.fsanaulla.chronicler.core.enums.{Epoch, Epochs}
import com.github.fsanaulla.chronicler.core.model.{Failable, Functor, InfluxReader}

import scala.concurrent.Future
import scala.reflect.ClassTag

final class AkkaMeasurementApi[T: ClassTag](
    dbName: String,
    measurementName: String,
    gzipped: Boolean
  )(implicit qb: AkkaQueryBuilder,
    bd: BodyBuilder[RequestEntity],
    re: AkkaRequestExecutor,
    rh: AkkaResponseHandler,
    F: Functor[Future],
    FA: Failable[Future])
  extends MeasurementApi[Future, Future, HttpResponse, Uri, RequestEntity, T](
    dbName,
    measurementName,
    gzipped
  ) {

  
  def readChunked(
      query: String,
      epoch: Epoch = Epochs.None,
      pretty: Boolean = false,
      chunkSize: Int
    )(implicit rd: InfluxReader[T]
    ): Future[Source[ErrorOr[Array[T]], Any]] = {
    val uri = chunkedQuery(dbName, query, epoch, pretty, chunkSize)
    F.map(re.get(uri, compressed = false))(rh.queryChunkedResult[T])
  }
} 
Example 60
Source File: AkkaIOClient.scala    From chronicler   with Apache License 2.0
package com.github.fsanaulla.chronicler.akka.io

import akka.actor.ActorSystem
import akka.http.scaladsl.HttpsConnectionContext
import akka.http.scaladsl.model.{HttpResponse, RequestEntity, Uri}
import akka.stream.ActorMaterializer
import com.github.fsanaulla.chronicler.akka.shared.InfluxAkkaClient
import com.github.fsanaulla.chronicler.akka.shared.handlers._
import com.github.fsanaulla.chronicler.akka.shared.implicits._
import com.github.fsanaulla.chronicler.core.IOClient
import com.github.fsanaulla.chronicler.core.alias.ErrorOr
import com.github.fsanaulla.chronicler.core.model.{InfluxCredentials, InfluxDBInfo}

import scala.concurrent.{ExecutionContext, Future}
import scala.reflect.ClassTag

final class AkkaIOClient(
    host: String,
    port: Int,
    credentials: Option[InfluxCredentials],
    compress: Boolean,
    httpsContext: Option[HttpsConnectionContext],
    terminateActorSystem: Boolean
  )(implicit ex: ExecutionContext,
    system: ActorSystem)
  extends InfluxAkkaClient(terminateActorSystem, httpsContext)
  with IOClient[Future, Future, HttpResponse, Uri, RequestEntity] {

  implicit val mat: ActorMaterializer  = ActorMaterializer()
  implicit val bb: AkkaBodyBuilder     = new AkkaBodyBuilder()
  implicit val qb: AkkaQueryBuilder    = new AkkaQueryBuilder(schema, host, port, credentials)
  implicit val jh: AkkaJsonHandler     = new AkkaJsonHandler(new AkkaBodyUnmarshaller(compress))
  implicit val re: AkkaRequestExecutor = new AkkaRequestExecutor(ctx)
  implicit val rh: AkkaResponseHandler = new AkkaResponseHandler(jh)

  override def database(dbName: String): AkkaDatabaseApi =
    new AkkaDatabaseApi(dbName, compress)

  override def measurement[A: ClassTag](
      dbName: String,
      measurementName: String
    ): AkkaMeasurementApi[A] =
    new AkkaMeasurementApi[A](dbName, measurementName, compress)

  override def ping: Future[ErrorOr[InfluxDBInfo]] = {
    re.get(qb.buildQuery("/ping", Nil), compressed = false)
      .flatMap(rh.pingResult)
  }
} 
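A sketch of using this client directly through the constructor shown above (the host, port and names below are placeholders, not project configuration):

import akka.actor.ActorSystem
import com.github.fsanaulla.chronicler.akka.io.AkkaIOClient

import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._

object InfluxPingSketch extends App {
  implicit val system: ActorSystem = ActorSystem("influx-sketch")

  val client = new AkkaIOClient(
    host = "localhost",          // placeholder InfluxDB host
    port = 8086,
    credentials = None,
    compress = false,
    httpsContext = None,
    terminateActorSystem = true
  )

  // ping builds the "/ping" Uri through AkkaQueryBuilder and returns the parsed InfluxDBInfo (or an error).
  println(Await.result(client.ping, 10.seconds))

  system.terminate()
}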
Example 61
Source File: AkkaDatabaseApi.scala    From chronicler   with Apache License 2.0
package com.github.fsanaulla.chronicler.akka.io

import akka.http.scaladsl.model.{HttpResponse, RequestEntity, Uri}
import akka.stream.scaladsl.Source
import com.github.fsanaulla.chronicler.akka.shared.handlers.{
  AkkaQueryBuilder,
  AkkaRequestExecutor,
  AkkaResponseHandler
}
import com.github.fsanaulla.chronicler.core.alias.{ErrorOr, JPoint}
import com.github.fsanaulla.chronicler.core.api.DatabaseApi
import com.github.fsanaulla.chronicler.core.components.BodyBuilder
import com.github.fsanaulla.chronicler.core.enums.{Epoch, Epochs}
import com.github.fsanaulla.chronicler.core.model.{FunctionK, Functor}

import scala.concurrent.Future

final class AkkaDatabaseApi(
    dbName: String,
    compressed: Boolean
  )(implicit qb: AkkaQueryBuilder,
    bd: BodyBuilder[RequestEntity],
    re: AkkaRequestExecutor,
    rh: AkkaResponseHandler,
    F: Functor[Future],
    FK: FunctionK[Future, Future])
  extends DatabaseApi[Future, Future, HttpResponse, Uri, RequestEntity](dbName, compressed) {

  
  def readChunkedJson(
      query: String,
      epoch: Epoch = Epochs.None,
      pretty: Boolean = false,
      chunkSize: Int
    ): Future[Source[ErrorOr[Array[JPoint]], Any]] = {
    val uri = chunkedQuery(dbName, query, epoch, pretty, chunkSize)
    F.map(re.get(uri, compressed))(rh.queryChunkedResultJson)
  }
} 
Example 62
Source File: WebService.scala    From heimdallr   with Apache License 2.0
package chat

import scala.concurrent.ExecutionContext.Implicits._
import scala.util.{Failure,Success}
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.http.scaladsl.Http
import akka.http.scaladsl.Http.{ ServerBinding }
import akka.http.scaladsl.model.{ HttpRequest, HttpResponse, Uri }
import akka.stream.scaladsl.{ Flow, Sink, Source }
import org.slf4j.LoggerFactory

trait WebService {
  val log = LoggerFactory.getLogger("total")
  private var binding: scala.concurrent.Future[ServerBinding] = null

  def serviceBind(serviceName: String, bindRoute: Flow[HttpRequest, HttpResponse, Any], bindPort: Int)
                 (implicit actorSystem: ActorSystem, materializer: Materializer): Unit = {
    binding = Http().bindAndHandle(bindRoute,"0.0.0.0", bindPort)

    // the rest of the sample code will go here
    binding.onComplete {
      //binding success check
      case Success(binding) =>
        val localAddress = binding.localAddress
        log.info(s"${serviceName} is listening on ${localAddress.getAddress}:${localAddress.getPort}")

      case Failure(e) =>
        log.error(s"${serviceName} Binding failed with ${e.getMessage}")
    }
  }

  def serviceUnbind(serviceName: String) = {
    if( binding != null )
    {
      binding
        .flatMap(_.unbind())
        .onComplete(_ =>
          log.info(s"${serviceName} listening port unbinding ... ")
        )
    }
    else
      log.info( s"${serviceName} Unbinding Failed !" )
  }
} 
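A minimal sketch (not part of heimdallr; the object and route names are made up) of wiring a one-endpoint route through this trait:

import akka.actor.ActorSystem
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.stream.ActorMaterializer
import chat.WebService

object PingServer extends App with WebService {
  implicit val system: ActorSystem    = ActorSystem("ping-server")
  implicit val mat: ActorMaterializer = ActorMaterializer()

  // serviceBind expects a Flow[HttpRequest, HttpResponse, Any]; Route.handlerFlow converts the route.
  private val route: Route = path("ping")(get(complete("pong")))

  serviceBind("ping-service", Route.handlerFlow(route), bindPort = 8080)
  sys.addShutdownHook(serviceUnbind("ping-service"))
}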
Example 63
Source File: SparqlWriteQuery.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.commons.sparql.client

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.rdf.Graph


object SparqlWriteQuery {

  def replace(graph: Uri, data: Graph): SparqlReplaceQuery =
    SparqlReplaceQuery(
      s"""DROP GRAPH <$graph>;
       |
       |INSERT DATA {
       |  GRAPH <$graph> {
       |    ${data.ntriples}
       |  }
       |};""".stripMargin,
      graph
    )
} 
Example 64
Source File: StaticResourceRoutes.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.commons.http.routes

import java.util.regex.Pattern.quote

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import ch.epfl.bluebrain.nexus.commons.http.JsonLdCirceSupport._
import ch.epfl.bluebrain.nexus.commons.http.directives.PrefixDirectives
import io.circe.Json
import io.circe.parser.parse

import scala.io.Source


class StaticResourceRoutes(resourcePaths: Map[String, String], prefix: String, baseUri: Uri) extends PrefixDirectives {

  private def contentOf(file: String): String = {
    val source   = Source.fromInputStream(getClass.getResourceAsStream(file))
    val contents = source.mkString
    source.close()
    contents
  }

  private def contentOf(file: String, replacements: Map[String, String]): String =
    replacements.foldLeft(contentOf(file)) {
      case (value, (regex, replacement)) => value.replaceAll(regex, replacement)
    }

  private val baseReplacement: Map[String, String] = Map(quote("{{base}}") -> baseUri.toString)

  private lazy val resources: Map[String, Json] =
    resourcePaths.view
      .mapValues { resource => parse(contentOf(resource, baseReplacement)).toOption }
      .flatMap {
        case (key, value) =>
          value match {
            case Some(v) => Some((key, v))
            case None    => None
          }
      }
      .toMap

  def routes: Route =
    uriPrefix(baseUri) {
      (get & pathPrefix(prefix)) {
        extractUnmatchedPath { resourcePath =>
          resources.get(resourcePath.toString) match {
            case Some(json) => complete(json)
            case None       => reject
          }
        }
      }
    }

} 
Example 65
Source File: QueryResultEncoder.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.search

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query
import ch.epfl.bluebrain.nexus.commons.search.QueryResult.{ScoredQueryResult, UnscoredQueryResult}
import ch.epfl.bluebrain.nexus.commons.search.QueryResults.{ScoredQueryResults, UnscoredQueryResults}
import ch.epfl.bluebrain.nexus.commons.search.{FromPagination, QueryResult, QueryResults}
import ch.epfl.bluebrain.nexus.kg.config.Contexts.{resourceCtxUri, searchCtxUri}
import ch.epfl.bluebrain.nexus.kg.directives.QueryDirectives.{after, from, size}
import ch.epfl.bluebrain.nexus.kg.indexing.SparqlLink
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import ch.epfl.bluebrain.nexus.service.config.Vocabulary.nxv
import io.circe.syntax._
import io.circe.{Encoder, Json}

trait LowPriorityQueryResultsEncoder {

  implicit def qrsEncoderLowPrio[A: Encoder]: Encoder[QueryResults[A]] =
    Encoder.instance(qrsEncoderJsonLinks[A](None).apply(_))

  implicit private val uriEncoder: Encoder[Uri] = Encoder.encodeString.contramap(_.toString)

  protected def qrsEncoderJsonLinks[A: Encoder](next: Option[Uri]): Encoder[QueryResults[A]] = {
    implicit def qrEncoderJson: Encoder[QueryResult[A]]     =
      Encoder.instance {
        case UnscoredQueryResult(v)      => v.asJson.removeKeys(nxv.original_source.prefix)
        case ScoredQueryResult(score, v) =>
          v.asJson.removeKeys(nxv.original_source.prefix) deepMerge
            Json.obj(nxv.score.prefix -> Json.fromFloatOrNull(score))
      }
    def json(total: Long, list: List[QueryResult[A]]): Json =
      Json
        .obj(nxv.total.prefix -> Json.fromLong(total), nxv.results.prefix -> Json.arr(list.map(qrEncoderJson(_)): _*))
        .addContext(searchCtxUri)
        .addContext(resourceCtxUri)

    Encoder.instance {
      case UnscoredQueryResults(total, list, _)         =>
        json(total, list) deepMerge Json.obj(nxv.next.prefix -> next.asJson)
      case ScoredQueryResults(total, maxScore, list, _) =>
        json(total, list) deepMerge
          Json.obj(nxv.maxScore.prefix -> maxScore.asJson, nxv.next.prefix -> next.asJson)
    }
  }
}

object QueryResultEncoder extends LowPriorityQueryResultsEncoder {

  implicit def qrsEncoderJson(implicit searchUri: Uri, http: HttpConfig): Encoder[QueryResults[Json]] =
    Encoder.instance { results =>
      val nextLink = results.token.flatMap(next(searchUri, _))
      qrsEncoderJsonLinks[Json](nextLink).apply(results)
    }

  implicit def qrsEncoderJson(implicit
      searchUri: Uri,
      pagination: FromPagination,
      http: HttpConfig
  ): Encoder[QueryResults[SparqlLink]] =
    Encoder.instance { results =>
      val nextLink = next(searchUri, results.total, pagination)
      qrsEncoderJsonLinks[SparqlLink](nextLink).apply(results)
    }

  private def next(current: Uri, total: Long, pagination: FromPagination)(implicit http: HttpConfig): Option[Uri] = {
    val nextFrom = pagination.from + pagination.size
    if (nextFrom < total.toInt) {
      val params = current.query().toMap + (from -> nextFrom.toString) + (size -> pagination.size.toString)
      Some(toPublic(current).withQuery(Query(params)))
    } else None
  }

  private def next(current: Uri, afterToken: String)(implicit http: HttpConfig): Option[Uri] =
    current.query().get(after) match {
      case Some(`afterToken`) => None
      case _                  =>
        val params = current.query().toMap + (after -> afterToken) - from
        Some(toPublic(current).withQuery(Query(params)))
    }

  private def toPublic(uri: Uri)(implicit http: HttpConfig): Uri =
    uri.copy(scheme = http.publicUri.scheme, authority = http.publicUri.authority)
} 
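The "next" links above come down to plain query manipulation on the incoming Uri; a stand-alone illustration (the example URL and values are invented):

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query

object NextLinkSketch extends App {
  val current: Uri =
    Uri("http://nexus.example.com/v1/resources/org/proj?type=someType&from=0&size=10")

  // Keep every existing parameter, override the paging ones, and re-attach the query.
  val params: Map[String, String] = current.query().toMap + ("from" -> "10") + ("size" -> "10")
  val nextLink: Uri               = current.withQuery(Query(params))

  println(nextLink) // parameter order may change, since the Query is rebuilt from a Map
}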
Example 66
Source File: RemoteDiskStorageOperations.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.storage

import akka.http.scaladsl.model.Uri
import cats.Applicative
import cats.effect.Effect
import cats.implicits._
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.iam.client.types.AuthToken
import ch.epfl.bluebrain.nexus.kg.resources.ResId
import ch.epfl.bluebrain.nexus.kg.resources.file.File._
import ch.epfl.bluebrain.nexus.kg.storage.Storage._
import ch.epfl.bluebrain.nexus.storage.client.StorageClient
import ch.epfl.bluebrain.nexus.storage.client.types.FileAttributes.{Digest => StorageDigest}
import ch.epfl.bluebrain.nexus.storage.client.types.{FileAttributes => StorageFileAttributes}

object RemoteDiskStorageOperations {

  // TODO: Remove when migrating ADMIN client
  implicit private def oldTokenConversion(implicit token: Option[AccessToken]): Option[AuthToken] =
    token.map(t => AuthToken(t.value))

  implicit private def toDigest(digest: StorageDigest): Digest = Digest(digest.algorithm, digest.value)

  
  final class FetchAttributes[F[_]](storage: RemoteDiskStorage, client: StorageClient[F])
      extends FetchFileAttributes[F] {
    implicit val cred = storage.credentials.map(AccessToken)

    override def apply(relativePath: Uri.Path): F[StorageFileAttributes] =
      client.getAttributes(storage.folder, relativePath)
  }

} 
Example 67
Source File: Settings.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.service.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.iam.types.Permission
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers.{catchReadError, optF}
import pureconfig.{ConfigConvert, ConfigSource}

import scala.annotation.nowarn


@SuppressWarnings(Array("LooksLikeInterpolatedString"))
class Settings(config: Config) extends Extension {

  @nowarn("cat=unused")
  implicit private val uriConverter: ConfigConvert[Uri] =
    ConfigConvert.viaString[Uri](catchReadError(Uri(_)), _.toString)

  @nowarn("cat=unused")
  implicit private val permissionConverter: ConfigConvert[Permission] =
    ConfigConvert.viaString[Permission](optF(Permission(_)), _.toString)

  @nowarn("cat=unused")
  implicit val absoluteIriConverter: ConfigConvert[AbsoluteIri] =
    ConfigConvert.viaString[AbsoluteIri](catchReadError(s => url"$s"), _.toString)

  @nowarn("cat=unused")
  implicit private val pathConverter: ConfigConvert[Path] =
    ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)

  @nowarn("cat=unused")
  implicit private val authTokenConverter: ConfigConvert[AccessToken] =
    ConfigConvert.viaString[AccessToken](catchReadError(s => AccessToken(s)), _.value)

  val serviceConfig: ServiceConfig =
    ConfigSource.fromConfig(config).at("app").loadOrThrow[ServiceConfig]
}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
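The Uri converter above is what lets pureconfig read akka URIs straight out of configuration; a small sketch of the same converter in isolation (the config keys and case class are invented):

import akka.http.scaladsl.model.Uri
import com.typesafe.config.ConfigFactory
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers.catchReadError
import pureconfig.{ConfigConvert, ConfigSource}

object UriFromConfigSketch extends App {
  implicit val uriConverter: ConfigConvert[Uri] =
    ConfigConvert.viaString[Uri](catchReadError(Uri(_)), _.toString)

  final case class Endpoints(publicUri: Uri)

  // Case class fields map to kebab-case keys by default, so publicUri reads "public-uri".
  val config = ConfigFactory.parseString("""endpoints { public-uri = "http://nexus.example.com/v1" }""")
  println(ConfigSource.fromConfig(config).at("endpoints").loadOrThrow[Endpoints].publicUri)
}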
Example 68
Source File: BlazegraphClientFixture.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.commons.sparql.client

import java.util.Properties

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.commons.sparql.client.BlazegraphClientFixture._
import ch.epfl.bluebrain.nexus.util.Randomness._

import scala.jdk.CollectionConverters._

trait BlazegraphClientFixture {

  val namespace: String = genString(8)
  val rand: String      = genString(length = 8)
  val graph: Uri        = s"http://$localhost:8080/graphs/$rand"
  val id: String        = genString()
  val label: String     = genString()
  val value: String     = genString()
}

object BlazegraphClientFixture {

  val localhost = "127.0.0.1"

  def properties(file: String = "/commons/sparql/index.properties"): Map[String, String] = {
    val props = new Properties()
    props.load(getClass.getResourceAsStream(file))
    props.asScala.toMap
  }
} 
Example 69
Source File: PrefixDirectivesSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.commons.http.directives

import akka.http.scaladsl.model.{StatusCodes, Uri}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.commons.http.directives.PrefixDirectives._
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers

class PrefixDirectivesSpec extends AnyWordSpecLike with Matchers with Inspectors with ScalatestRouteTest {

  override def testConfig: Config = ConfigFactory.empty()

  "A PrefixDirective" should {

    "match the prefix uri" in {
      forAll(
        Map(
          ""         -> "",
          "/"        -> "",
          "///"      -> "",
          "/dev"     -> "/dev",
          "/dev/"    -> "/dev",
          "/dev///"  -> "/dev",
          "/dev/sn/" -> "/dev/sn"
        ).toList
      ) {
        case (suffix, prefix) =>
          val uri   = Uri("http://localhost:80" + suffix)
          val route = uriPrefix(uri) {
            path("remainder") {
              get {
                complete(StatusCodes.OK)
              }
            }
          }

          Get(prefix + "/remainder") ~> route ~> check {
            status shouldEqual StatusCodes.OK
          }
      }
    }
  }
} 
Example 70
Source File: ElasticServer.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.commons.es.server.embed

import java.nio.file.Files
import java.util.Arrays._

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.commons.es.server.embed.ElasticServer.MyNode
import ch.epfl.bluebrain.nexus.util.{ActorSystemFixture, Randomness}
import org.apache.commons.io.FileUtils
import org.elasticsearch.common.settings.Settings
import org.elasticsearch.index.reindex.ReindexPlugin
import org.elasticsearch.node.Node
import org.elasticsearch.painless.PainlessPlugin
import org.elasticsearch.plugins.Plugin
import org.elasticsearch.transport.Netty4Plugin
import org.scalatest.wordspec.AnyWordSpecLike
import org.scalatest.BeforeAndAfterAll

import scala.jdk.CollectionConverters._
import scala.util.Try

// $COVERAGE-OFF$
abstract class ElasticServer
    extends ActorSystemFixture("ElasticServer")
    with AnyWordSpecLike
    with BeforeAndAfterAll
    with Randomness {

  override protected def beforeAll(): Unit = {
    super.beforeAll()
    startElastic()
  }

  override protected def afterAll(): Unit = {
    stopElastic()
    super.afterAll()
  }

  val startPort = freePort()
  val endPort   = startPort + 100

  val esUri       = Uri(s"http://localhost:$startPort")
  implicit val ec = system.dispatcher

  private val clusterName = "elasticsearch"

  private val dataDir  = Files.createTempDirectory("elasticsearch_data_").toFile
  private val settings = Settings
    .builder()
    .put("path.home", dataDir.toString)
    .put("http.port", s"$startPort-$endPort")
    .put("http.cors.enabled", true)
    .put("cluster.name", clusterName)
    .put("http.type", "netty4")
    .build

  private lazy val node =
    new MyNode(settings, asList(classOf[Netty4Plugin], classOf[PainlessPlugin], classOf[ReindexPlugin]))

  def startElastic(): Unit = {
    node.start()
    ()
  }

  def stopElastic(): Unit = {
    node.close()
    Try(FileUtils.forceDelete(dataDir))
    ()
  }
}

object ElasticServer extends Randomness {

  import java.util

  import org.elasticsearch.node.InternalSettingsPreparer

  private class MyNode(preparedSettings: Settings, classpathPlugins: util.Collection[Class[_ <: Plugin]])
      extends Node(
        InternalSettingsPreparer
          .prepareEnvironment(preparedSettings, Map.empty[String, String].asJava, null, () => "elasticsearch"),
        classpathPlugins,
        true
      ) {}
}
// $COVERAGE-ON$ 
Example 71
Source File: QueryResultEncoderSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.search

import java.time.Instant
import java.util.regex.Pattern.quote

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query
import ch.epfl.bluebrain.nexus.commons.circe.syntax._
import ch.epfl.bluebrain.nexus.commons.search.QueryResult.{ScoredQueryResult, UnscoredQueryResult}
import ch.epfl.bluebrain.nexus.commons.search.QueryResults
import ch.epfl.bluebrain.nexus.commons.search.QueryResults.{ScoredQueryResults, UnscoredQueryResults}
import ch.epfl.bluebrain.nexus.commons.test.{Randomness, Resources}
import ch.epfl.bluebrain.nexus.kg.search.QueryResultEncoder._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig
import ch.epfl.bluebrain.nexus.service.config.ServiceConfig.HttpConfig
import io.circe.Json
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class QueryResultEncoderSpec extends AnyWordSpecLike with Matchers with Resources with Randomness {

  implicit val orderedKeys = ServiceConfig.orderedKeys
  val org                  = genString()
  val proj                 = genString()
  val schema               = genString()
  val now                  = Instant.now()
  implicit val http        = HttpConfig("", 0, "v1", "http://nexus.com")
  implicit val uri         = Uri(s"http://nexus.com/resources/$org/$proj/$schema?type=someType&from=10&size=10")
  val before               = now.minusSeconds(60)

  "QueryResultsEncoder" should {
    def json(id: AbsoluteIri, createdAt: Instant): Json =
      jsonContentOf(
        "/resources/es-metadata.json",
        Map(
          quote("{id}")      -> id.asString,
          quote("{org}")     -> org,
          quote("{proj}")    -> proj,
          quote("{schema}")  -> schema,
          quote("{instant}") -> createdAt.toString
        )
      ) deepMerge Json.obj("_original_source" -> Json.fromString(Json.obj("k" -> Json.fromInt(1)).noSpaces))

    "encode ScoredQueryResults" in {
      val results: QueryResults[Json] = ScoredQueryResults[Json](
        3,
        0.3f,
        List(
          ScoredQueryResult(0.3f, json(url"http://nexus.com/result1", before)),
          ScoredQueryResult(0.2f, json(url"http://nexus.com/result2", before)),
          ScoredQueryResult(0.1f, json(url"http://nexus.com/result3", now))
        ),
        sort(now)
      )

      results.asJson.sortKeys shouldEqual jsonContentOf(
        "/search/scored-query-results.json",
        Map(
          quote("{org}")                -> org,
          quote("{proj}")               -> proj,
          quote("{schema}")             -> schema,
          quote("{before}")             -> before.toString,
          quote("{lastElementCreated}") -> now.toString,
          quote("{after}")              -> after(now)
        )
      )
    }
    "encode UnscoredQueryResults" in {
      val results: QueryResults[Json] = UnscoredQueryResults[Json](
        3,
        List(
          UnscoredQueryResult(json(url"http://nexus.com/result1", before)),
          UnscoredQueryResult(json(url"http://nexus.com/result2", before)),
          UnscoredQueryResult(json(url"http://nexus.com/result3", now))
        ),
        sort(now)
      )

      results.asJson.sortKeys shouldEqual jsonContentOf(
        "/search/unscored-query-results.json",
        Map(
          quote("{org}")                -> org,
          quote("{proj}")               -> proj,
          quote("{schema}")             -> schema,
          quote("{before}")             -> before.toString,
          quote("{lastElementCreated}") -> now.toString,
          quote("{after}")              -> after(now)
        )
      )

    }
  }

  private def sort(instant: Instant): Option[String] = Some(Json.arr(Json.fromString(instant.toString)).noSpaces)
  private def after(instant: Instant): String        =
    Query("after" -> List(Json.fromString(instant.toString)).asJson.noSpaces).toString()

} 
Example 72
Source File: DiskStorageOperationsSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths

import akka.http.scaladsl.model.{ContentTypes, Uri}
import cats.effect.IO
import ch.epfl.bluebrain.nexus.commons.test._
import ch.epfl.bluebrain.nexus.commons.test.io.IOEitherValues
import ch.epfl.bluebrain.nexus.kg.config.KgConfig._
import ch.epfl.bluebrain.nexus.kg.resources.file.File.FileDescription
import ch.epfl.bluebrain.nexus.kg.resources.Id
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.{KgError, TestHelper}
import ch.epfl.bluebrain.nexus.service.config.Settings
import ch.epfl.bluebrain.nexus.sourcing.RetryStrategyConfig
import org.mockito.IdiomaticMockito
import org.scalatest.{BeforeAndAfter, OptionValues}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class DiskStorageOperationsSpec
    extends ActorSystemFixture("DiskStorageOperationsSpec")
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfter
    with IdiomaticMockito
    with IOEitherValues
    with Resources
    with TestHelper
    with OptionValues {

  implicit private val appConfig = Settings(system).serviceConfig

  implicit private val sc: StorageConfig = appConfig.kg.storage.copy(
    DiskStorageConfig(Paths.get("/tmp"), "SHA-256", read, write, false, 1024L),
    RemoteDiskStorageConfig("http://example.com", "v1", None, "SHA-256", read, write, true, 1024L),
    S3StorageConfig("MD5", read, write, true, 1024L),
    "password",
    "salt",
    RetryStrategyConfig("linear", 300.millis, 5.minutes, 100, 1.second)
  )

  private val project  = ProjectRef(genUUID)
  private val storage  = Storage.DiskStorage.default(project)
  private val resId    = Id(storage.ref, genIri)
  private val fileDesc = FileDescription("my file.txt", ContentTypes.`text/plain(UTF-8)`)

  "DiskStorageOperations" should {

    "verify when the storage exists" in {
      val verify = new DiskStorageOperations.VerifyDiskStorage[IO](storage)
      verify.apply.accepted
    }

    "save and fetch files" in {
      val save   = new DiskStorageOperations.SaveDiskFile[IO](storage)
      val fetch  = new DiskStorageOperations.FetchDiskFile[IO]()
      val source = genSource

      val attr    = save.apply(resId, fileDesc, source).ioValue
      attr.bytes shouldEqual 16L
      attr.filename shouldEqual fileDesc.filename
      attr.mediaType shouldEqual fileDesc.mediaType.value
      attr.location shouldEqual Uri(s"file:///tmp/${mangle(project, attr.uuid, "my%20file.txt")}")
      attr.path shouldEqual attr.location.path.tail.tail.tail
      val fetched = fetch.apply(attr).ioValue

      consume(source) shouldEqual consume(fetched)
    }

    "not link files" in {
      val link = new DiskStorageOperations.LinkDiskFile[IO]()
      link.apply(resId, fileDesc, Uri.Path("/foo")).failed[KgError] shouldEqual KgError.UnsupportedOperation
    }
  }

} 
Example 73
Source File: RemoteDiskStorageOperationsSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.storage

import akka.http.scaladsl.model.ContentTypes._
import akka.http.scaladsl.model.Uri
import cats.effect.IO
import ch.epfl.bluebrain.nexus.commons.test.io.IOEitherValues
import ch.epfl.bluebrain.nexus.commons.test.{ActorSystemFixture, Resources}
import ch.epfl.bluebrain.nexus.iam.auth.AccessToken
import ch.epfl.bluebrain.nexus.iam.client.types.AuthToken
import ch.epfl.bluebrain.nexus.iam.types.Permission
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.resources.file.File.{Digest, FileAttributes, FileDescription}
import ch.epfl.bluebrain.nexus.kg.resources.Id
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage.RemoteDiskStorage
import ch.epfl.bluebrain.nexus.storage.client.StorageClient
import ch.epfl.bluebrain.nexus.storage.client.types.FileAttributes.{Digest => StorageDigest}
import ch.epfl.bluebrain.nexus.storage.client.types.{FileAttributes => StorageFileAttributes}
import org.mockito.{IdiomaticMockito, Mockito}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class RemoteDiskStorageOperationsSpec
    extends ActorSystemFixture("RemoteDiskStorageOperationsSpec")
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfter
    with IdiomaticMockito
    with IOEitherValues
    with Resources
    with TestHelper {

  private val endpoint = "http://nexus.example.com/v1"

  // TODO: Remove when migrating ADMIN client
  implicit private def oldTokenConversion(implicit token: Option[AccessToken]): Option[AuthToken] =
    token.map(t => AuthToken(t.value))

  sealed trait Ctx {
    val cred                                = genString()
    implicit val token: Option[AccessToken] = Some(AccessToken(cred))
    val path                                = Uri.Path(s"${genString()}/${genString()}")
    // format: off
    val storage = RemoteDiskStorage(ProjectRef(genUUID), genIri, 1L, false, false, "SHA-256", endpoint, Some(cred), genString(), Permission.unsafe(genString()), Permission.unsafe(genString()), 1024L)
    val attributes = FileAttributes(s"$endpoint/${storage.folder}/$path", path, s"${genString()}.json", `application/json`, 12L, Digest("SHA-256", genString()))
    // format: on
  }

  private val client = mock[StorageClient[IO]]

  before {
    Mockito.reset(client)
  }

  "RemoteDiskStorageOperations" should {

    "verify when storage exists" in new Ctx {
      client.exists(storage.folder) shouldReturn IO(true)
      val verify = new RemoteDiskStorageOperations.Verify[IO](storage, client)
      verify.apply.accepted
    }

    "verify when storage does not exists" in new Ctx {
      client.exists(storage.folder) shouldReturn IO(false)
      val verify = new RemoteDiskStorageOperations.Verify[IO](storage, client)
      verify.apply
        .rejected[
          String
        ] shouldEqual s"Folder '${storage.folder}' does not exists on the endpoint '${storage.endpoint}'"
    }

    "fetch file" in new Ctx {
      val source       = genSource
      client.getFile(storage.folder, path) shouldReturn IO(source)
      val fetch        = new RemoteDiskStorageOperations.Fetch[IO](storage, client)
      val resultSource = fetch.apply(attributes).ioValue
      consume(resultSource) shouldEqual consume(source)
    }

    "link file" in new Ctx {
      val id               = Id(storage.ref, genIri)
      val sourcePath       = Uri.Path(s"${genString()}/${genString()}")
      val destRelativePath = Uri.Path(mangle(storage.ref, attributes.uuid, attributes.filename))
      client.moveFile(storage.folder, sourcePath, destRelativePath) shouldReturn
        IO(
          StorageFileAttributes(
            attributes.location,
            attributes.bytes,
            StorageDigest(attributes.digest.algorithm, attributes.digest.value),
            attributes.mediaType
          )
        )
      val link             = new RemoteDiskStorageOperations.Link[IO](storage, client)
      link
        .apply(id, FileDescription(attributes.uuid, attributes.filename, Some(attributes.mediaType)), sourcePath)
        .ioValue shouldEqual attributes.copy(path = destRelativePath)
    }
  }
} 
Example 74
Source File: PackageObjectSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths
import java.util.UUID

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.FileIO
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.file.File.Digest
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

class PackageObjectSpec extends AnyFlatSpecLike with Matchers with ScalaFutures {

  "uriToPath" should "convert an Akka Uri that represents a valid file path to a Java Path" in {
    uriToPath("file:///some/path/my%20file.txt") shouldEqual Some(Paths.get("/some/path/my file.txt"))
    uriToPath("s3://some/path") shouldEqual None
    uriToPath("foo") shouldEqual None
  }

  "pathToUri" should "convert a Java Path to an Akka Uri" in {
    pathToUri(Paths.get("/some/path/my file.txt")) shouldEqual Uri("file:///some/path/my%20file.txt")
  }

  "mangle" should "generate a properly mangled path given a file project and UUID" in {
    val projUuid = UUID.fromString("4947db1e-33d8-462b-9754-3e8ae74fcd4e")
    val fileUuid = UUID.fromString("b1d7cda2-1ec0-40d2-b12e-3baf4895f7d7")
    mangle(ProjectRef(projUuid), fileUuid, "my file.jpg") shouldEqual
      "4947db1e-33d8-462b-9754-3e8ae74fcd4e/b/1/d/7/c/d/a/2/my file.jpg"
  }

  "digest" should "properly compute the hash of a given input" in {
    implicit val as: ActorSystem = ActorSystem()

    val filePath = "/storage/s3.json"
    val path     = Paths.get(getClass.getResource(filePath).toURI)
    val input    = FileIO.fromPath(path)
    val algo     = "SHA-256"

    input.runWith(digestSink(algo)(as.dispatcher)).futureValue shouldEqual Digest(
      algo,
      "5602c497e51680bef1f3120b1d6f65d480555002a3290029f8178932e8f4801a"
    )
  }
} 
Example 75
Source File: FromAkkaConverters.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.rdf.akka

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.rdf.Iri
import ch.epfl.bluebrain.nexus.rdf.Iri.{AbsoluteIri, Path}
import ch.epfl.bluebrain.nexus.rdf.Iri.Path.{Segment, Slash}
import ch.epfl.bluebrain.nexus.rdf.Node.IriNode

import scala.annotation.tailrec


trait FromAkkaConverters {

  def asIriPath(path: Uri.Path): Iri.Path = {
    @tailrec
    def inner(acc: Iri.Path, remaining: Uri.Path): Iri.Path =
      remaining match {
        case Uri.Path.SingleSlash         => Slash(acc)
        case Uri.Path.Empty               => acc
        case Uri.Path.Slash(tail)         => inner(Slash(acc), tail)
        case Uri.Path.Segment(head, tail) => inner(Segment(head, acc), tail)
      }
    inner(Path.Empty, path)
  }
} 
Example 76
Source File: AkkaConvertersSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.rdf.akka

import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.rdf.Iri.Path
import ch.epfl.bluebrain.nexus.rdf.Node.{BNode, IriNode, Literal}
import ch.epfl.bluebrain.nexus.rdf.RdfSpec
import ch.epfl.bluebrain.nexus.rdf.akka.syntax.all._
import ch.epfl.bluebrain.nexus.rdf.syntax.all._

class AkkaConvertersSpec extends RdfSpec {

  "An Node" should {
    "be converted correctly to URI" when {
      "it's an IriNode with a valid URL" in {
        IriNode(url"http://example.com/path").asAkka.rightValue shouldEqual Uri("http://example.com/path")
      }
      "it's an IriNode with Iri which is not a valid Uri" in {
        IriNode(url"http://example.com/päth").asAkka.rightValue shouldEqual Uri("http://example.com/p%C3%A4th")
      }
    }

    "fail to convert" when {
      "it's not an IriNode" in {
        BNode("1").rightValue.asAkka.leftValue shouldEqual "_:1 cannot be converted to URI."
        Literal(
          true
        ).asAkka.leftValue shouldEqual "\"true\"^^<http://www.w3.org/2001/XMLSchema#boolean> cannot be converted to URI."
      }
    }
  }

  "An AbsoluteIri" should {
    "be converted correctly to Uri" when {
      "it's a valid Uri" in {
        url"http://example.com/path".asAkka shouldEqual Uri("http://example.com/path")
      }
      "it's not a valid Uri" in {
        url"http://example.com/päth".asAkka shouldEqual Uri("http://example.com/p%C3%A4th")
      }
    }
  }

  "An Akka Uri" should {
    "convert to AbsoluteIri" in {
      Uri("http://example.com/path").asAbsoluteIri shouldEqual url"http://example.com/path"
    }
    "convert to IriNode" in {
      Uri("http://example.com/path").asRdfNode shouldEqual IriNode(url"http://example.com/path")
    }
  }

  "An Uri.Path" should {
    "be converted to Iri.Path" in {
      Uri.Path("/a/b/c/ Æ").asIriPath shouldEqual Path("/a/b/c/%20Æ").rightValue
      Uri.Path("/a/b/c/d/").asIriPath shouldEqual Path("/a/b/c/d/").rightValue
      Uri.Path("/").asIriPath shouldEqual Path("/").rightValue
      Uri.Path("").asIriPath shouldEqual Path("").rightValue
    }
  }

  "An Iri.Path" should {
    "be converted to Uri.Path" in {
      Path("/a/b/Æ").rightValue.asAkka shouldEqual Uri.Path("/a/b/%C3%86")
      Path("/a/b/c/d/").rightValue.asAkka shouldEqual Uri.Path("/a/b/c/d/")
      Path("/").rightValue.asAkka shouldEqual Uri.Path("/")
      Path("").rightValue.asAkka shouldEqual Uri.Path("")
    }
  }

} 
Example 77
Source File: StorageDirectives.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage.routes

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.Uri.Path._
import akka.http.scaladsl.server.Directives.{extractUnmatchedPath, failWith, pass, provide, reject}
import akka.http.scaladsl.server._
import ch.epfl.bluebrain.nexus.storage.Rejection.{BucketNotFound, PathAlreadyExists, PathNotFound}
import ch.epfl.bluebrain.nexus.storage.StorageError.PathInvalid
import ch.epfl.bluebrain.nexus.storage.Storages
import ch.epfl.bluebrain.nexus.storage.Storages.PathExistence.{PathDoesNotExist, PathExists}
import ch.epfl.bluebrain.nexus.storage.Storages.BucketExistence.BucketExists

import scala.annotation.tailrec

object StorageDirectives {

  
  def extractRelativeFilePath(name: String): Directive1[Path] =
    extractRelativePath(name).flatMap {
      case path if path.reverse.startsWithSegment => provide(path)
      case path                                   => failWith(PathInvalid(name, path))
    }

  @tailrec
  private def relativize(path: Path): Path =
    path match {
      case Slash(rest) => relativize(rest)
      case rest        => rest
    }
} 
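extractRelativeFilePath hinges on a Uri.Path detail that is easy to miss: only a path whose last element is a segment (a file name) is accepted. A quick stand-alone check (not from the project):

import akka.http.scaladsl.model.Uri.Path

object PathShapeSketch extends App {
  // Reversing a path that ends in a file name puts that Segment first...
  println(Path("/some/path/file.txt").reverse.startsWithSegment) // true
  // ...while a path ending in a slash reverses to a leading Slash, which the directive rejects.
  println(Path("/some/path/").reverse.startsWithSegment)         // false
}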
Example 78
Source File: File.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage

import akka.http.scaladsl.model.{ContentType, Uri}
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.storage.config.Contexts.resourceCtxUri
import scala.annotation.nowarn
import io.circe.generic.extras.Configuration
import io.circe.generic.extras.semiauto._
import io.circe.{Decoder, Encoder}

// $COVERAGE-OFF$
object File {

  @nowarn("cat=unused")
  implicit private val config: Configuration = Configuration.default
    .copy(transformMemberNames = {
      case "@context" => "@context"
      case key        => s"_$key"
    })

  
  final case class Digest(algorithm: String, value: String)

  object Digest {
    val empty: Digest                           = Digest("", "")
    implicit val digestEncoder: Encoder[Digest] = deriveConfiguredEncoder[Digest]
    implicit val digestDecoder: Decoder[Digest] = deriveConfiguredDecoder[Digest]
  }

}
// $COVERAGE-ON$ 
Example 79
Source File: Settings.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage.config

import java.nio.file.{Path, Paths}

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import scala.annotation.nowarn
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers._
import pureconfig._


@SuppressWarnings(Array("LooksLikeInterpolatedString", "OptionGet"))
class Settings(config: Config) extends Extension {

  @nowarn("cat=unused")
  val appConfig: AppConfig = {
    implicit val uriConverter: ConfigConvert[Uri]                 =
      ConfigConvert.viaString[Uri](catchReadError(s => Uri(s)), _.toString)
    implicit val pathConverter: ConfigConvert[Path]               =
      ConfigConvert.viaString[Path](catchReadError(s => Paths.get(s)), _.toString)
    implicit val absoluteIriConverter: ConfigConvert[AbsoluteIri] =
      ConfigConvert.viaString[AbsoluteIri](catchReadError(s => url"$s"), _.toString)
    ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig]
  }

}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
Example 80
Source File: StorageDirectivesSpec.scala    From nexus   with Apache License 2.0
package ch.epfl.bluebrain.nexus.storage.routes

import java.util.regex.Pattern.quote

import akka.http.scaladsl.model.{StatusCodes, Uri}
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.testkit.ScalatestRouteTest
import ch.epfl.bluebrain.nexus.storage.JsonLdCirceSupport._
import ch.epfl.bluebrain.nexus.storage.routes.Routes.exceptionHandler
import ch.epfl.bluebrain.nexus.storage.routes.StorageDirectives._
import ch.epfl.bluebrain.nexus.storage.utils.Resources
import io.circe.Json
import org.scalatest.Inspectors
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class StorageDirectivesSpec
    extends AnyWordSpecLike
    with Matchers
    with ScalatestRouteTest
    with Inspectors
    with Resources {

  "the storage directives" when {

    def pathInvalidJson(path: Uri.Path): Json =
      jsonContentOf(
        "/error.json",
        Map(
          quote("{type}") -> "PathInvalid",
          quote(
            "{reason}"
          )               -> s"The provided location inside the bucket 'name' with the relative path '$path' is invalid."
        )
      )

    "dealing with file path extraction" should {
      val route = handleExceptions(exceptionHandler) {
        (extractRelativeFilePath("name") & get) { path =>
          complete(s"$path")
        }
      }

      "reject when path contains 2 slashes" in {
        Get("///") ~> route ~> check {
          status shouldEqual StatusCodes.BadRequest
          responseAs[Json] shouldEqual pathInvalidJson(Uri.Path.Empty)
        }
      }

      "reject when path does not end with a segment" in {
        Get("/some/path/") ~> route ~> check {
          status shouldEqual StatusCodes.BadRequest
          responseAs[Json] shouldEqual pathInvalidJson(Uri.Path("some/path/"))
        }
      }

      "return path" in {
        Get("/some/path/file.txt") ~> route ~> check {
          responseAs[String] shouldEqual "some/path/file.txt"
        }
      }
    }

    "dealing with path validation" should {
      def route(path: Uri.Path) =
        handleExceptions(exceptionHandler) {
          (validatePath("name", path) & get) {
            complete(s"$path")
          }
        }

      "reject when some of the segments is . or .." in {
        val paths = List(Uri.Path("/./other/file.txt"), Uri.Path("/some/../file.txt"))
        forAll(paths) { path =>
          Get(path.toString()) ~> route(path) ~> check {
            status shouldEqual StatusCodes.BadRequest
            responseAs[Json] shouldEqual pathInvalidJson(path)
          }
        }
      }

      "pass" in {
        Get("/some/path") ~> route(Uri.Path("/some/path")) ~> check {
          handled shouldEqual true
        }
      }
    }
  }
} 
Example 81
Source File: WolframServiceImpl.scala    From lagom-on-kube   with Apache License 2.0
package me.alexray.wolfram.impl

import java.net.URLEncoder

import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.Materializer
import akka.util.ByteString
import com.lightbend.lagom.scaladsl.api.ServiceCall
import me.alexray.wolfram.api.WolframService
import play.api.Configuration

import scala.concurrent.{ExecutionContext, Future}


class WolframServiceImpl(config: Configuration)
                        (implicit system: ActorSystem, mat: Materializer, ec: ExecutionContext)
  extends WolframService
{

  val appID = config.underlying.getString("wolfram.appid")
  val apiUrl = s"http://api.wolframalpha.com/v2/"


  override def query(q: String): ServiceCall[NotUsed, String] = ServiceCall { _ =>

    val url = apiUrl + s"query?appid=$appID&input=" + URLEncoder.encode(q, "UTF-8")

    for {
      response <- Http().singleRequest(HttpRequest(uri = Uri(url)))
      if response.status.isSuccess()
      data <- Unmarshal(response).to[String]
    } yield data

  }

  override def simple(q: String): ServiceCall[NotUsed, Array[Byte]] = ServiceCall { _ =>

    println(s"quetions = '$q'")

    val url = apiUrl + s"simple?appid=$appID&input=" +  URLEncoder.encode(q, "UTF-8").replace("+", "%20")

    println(s"url = '$url'")

    for {
      response <- Http().singleRequest(HttpRequest(uri = Uri(url)))
      if response.status.isSuccess()
      bytes <- Unmarshal(response).to[ByteString]
    } yield {
      println(s"received image ${bytes.size} bytes long")
      bytes.toArray
    }

  }
} 
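The request URLs above are concatenated by hand around URLEncoder; the same Uri could also be assembled with Uri.Query, which handles the escaping itself. A sketch (the app id below is a placeholder, not the project's configuration):

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query

object WolframUriSketch extends App {
  val appID    = "XXXX-XXXXXXXXXX"        // placeholder app id
  val question = "population of France"

  val queryUri: Uri =
    Uri("http://api.wolframalpha.com/v2/query").withQuery(Query("appid" -> appID, "input" -> question))

  println(queryUri) // the query parameters are escaped by Uri.Query when rendered
}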
Example 82
Source File: ProjectFetcherImpl.scala    From graphcool-framework   with Apache License 2.0
package cool.graph.client.finder

import akka.http.scaladsl.model.Uri
import com.twitter.conversions.time._
import com.typesafe.config.Config
import cool.graph.shared.SchemaSerializer
import cool.graph.shared.models.ProjectWithClientId
import cool.graph.twitterFutures.TwitterFutureImplicits._

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

case class ProjectFetcherImpl(
    blockedProjectIds: Vector[String],
    config: Config
) extends RefreshableProjectFetcher {
  private val schemaManagerEndpoint = config.getString("schemaManagerEndpoint")
  private val schemaManagerSecret   = config.getString("schemaManagerSecret")

  private lazy val schemaService = {
    val client = if (schemaManagerEndpoint.startsWith("https")) {
      com.twitter.finagle.Http.client.withTls(Uri(schemaManagerEndpoint).authority.host.address())
    } else {
      com.twitter.finagle.Http.client
    }

    val destination = s"${Uri(schemaManagerEndpoint).authority.host}:${Uri(schemaManagerEndpoint).effectivePort}"
    client.withRequestTimeout(10.seconds).newService(destination)
  }

  override def fetchRefreshed(projectIdOrAlias: String): Future[Option[ProjectWithClientId]] = fetch(projectIdOrAlias, forceRefresh = true)
  override def fetch(projectIdOrAlias: String): Future[Option[ProjectWithClientId]]          = fetch(projectIdOrAlias, forceRefresh = false)

  
  private def fetch(projectIdOrAlias: String, forceRefresh: Boolean): Future[Option[ProjectWithClientId]] = {
    if (blockedProjectIds.contains(projectIdOrAlias)) {
      return Future.successful(None)
    }

    // load from backend-api-schema-manager service
    val uri = forceRefresh match {
      case true  => s"$schemaManagerEndpoint/$projectIdOrAlias?forceRefresh=true"
      case false => s"$schemaManagerEndpoint/$projectIdOrAlias"
    }

    val request = com.twitter.finagle.http
      .RequestBuilder()
      .url(uri)
      .addHeader("Authorization", s"Bearer $schemaManagerSecret")
      .buildGet()

    // schema deserialization failure should blow up as we have no recourse
    schemaService(request).map {
      case response if response.status.code >= 400 => None
      case response                                => Some(SchemaSerializer.deserializeProjectWithClientId(response.getContentString()).get)
    }.asScala
  }
} 
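Here akka's Uri is used only to split the configured endpoint into a host and an effective port for Finagle; the two calls in isolation (the endpoint is invented):

import akka.http.scaladsl.model.Uri

object EndpointPartsSketch extends App {
  val endpoint: Uri = Uri("https://schema-manager.example.com/schemas") // hypothetical endpoint

  println(endpoint.authority.host.address()) // schema-manager.example.com
  println(endpoint.effectivePort)            // 443, the default port for the https scheme
}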
Example 83
Source File: UpdateFunction.scala    From graphcool-framework   with Apache License 2.0
package cool.graph.system.mutactions.internal

import akka.http.scaladsl.model.Uri
import cool.graph.shared.errors.UserInputErrors._
import cool.graph.shared.NameConstraints
import cool.graph.shared.errors.UserInputErrors.{FunctionHasInvalidUrl, FunctionWithNameAlreadyExists, IllegalFunctionName, SchemaExtensionParseError}
import cool.graph.shared.models.{CustomMutationFunction, CustomQueryFunction, Function, FunctionDelivery, HttpFunction, Project}
import cool.graph.system.database.ModelToDbMapper
import cool.graph.system.database.tables.FunctionTable
import cool.graph.{MutactionVerificationSuccess, SystemSqlMutaction, SystemSqlStatementResult}
import slick.jdbc.MySQLProfile.api._
import slick.lifted.TableQuery

import scala.concurrent.Future
import scala.util.{Failure, Success, Try}

case class UpdateFunction(project: Project, newFunction: Function, oldFunction: Function) extends SystemSqlMutaction {

  override def execute: Future[SystemSqlStatementResult[Any]] = {

    implicit val FunctionBindingMapper                  = FunctionTable.FunctionBindingMapper
    implicit val FunctionTypeMapper                     = FunctionTable.FunctionTypeMapper
    implicit val RequestPipelineMutationOperationMapper = FunctionTable.RequestPipelineMutationOperationMapper

    val functions = TableQuery[FunctionTable]

    Future.successful {
      SystemSqlStatementResult {
        DBIO.seq(
          functions.filter(_.id === newFunction.id).update(ModelToDbMapper.convertFunction(project, newFunction))
        )
      }
    }
  }

  override def verify(): Future[Try[MutactionVerificationSuccess]] = FunctionVerification.verifyFunction(newFunction, project)

  override def rollback: Option[Future[SystemSqlStatementResult[Any]]] =
    Some(UpdateFunction(project = project, newFunction = oldFunction, oldFunction = newFunction).execute)

}

object FunctionVerification {

  def verifyFunction(function: Function, project: Project): Future[Try[MutactionVerificationSuccess] with Product with Serializable] = {

    def differentFunctionWithSameTypeName(name: String, id: String): Boolean = {
      project.customMutationFunctions.exists(func => func.payloadType.name == name && func.id != id) ||
      project.customQueryFunctions.exists(func => func.payloadType.name == name && func.id != id)

    }

    def differentFunctionWithSameName: Boolean = {
      project.functions.exists(func => func.name.toLowerCase == function.name.toLowerCase && func.id != function.id)
    }

    val typeNameViolation = function match {
      case f: CustomMutationFunction if project.models.map(_.name).contains(f.payloadType.name)     => List(f.payloadType.name)
      case f: CustomQueryFunction if project.models.map(_.name).contains(f.payloadType.name)        => List(f.payloadType.name)
      case f: CustomMutationFunction if differentFunctionWithSameTypeName(f.payloadType.name, f.id) => List(f.payloadType.name)
      case f: CustomQueryFunction if differentFunctionWithSameTypeName(f.payloadType.name, f.id)    => List(f.payloadType.name)
      case _                                                                                        => List.empty
    }

    def hasInvalidUrl = function.delivery match {
      case x: HttpFunction => Try(Uri(x.url)).isFailure
      case _               => false
    }

    def getInvalidUrl(delivery: FunctionDelivery) = delivery.asInstanceOf[HttpFunction].url

    def projectHasNameConflict = function match {
      case x: CustomQueryFunction    => project.hasSchemaNameConflict(x.queryName, function.id)
      case x: CustomMutationFunction => project.hasSchemaNameConflict(x.mutationName, function.id)
      case _                         => false
    }

    Future.successful(() match {
      case _ if !NameConstraints.isValidFunctionName(function.name) => Failure(IllegalFunctionName(function.name))
      case _ if typeNameViolation.nonEmpty                          => Failure(FunctionHasInvalidPayloadName(name = function.name, payloadName = typeNameViolation.head))
      case _ if differentFunctionWithSameName                       => Failure(FunctionWithNameAlreadyExists(name = function.name))
      case _ if hasInvalidUrl                                       => Failure(FunctionHasInvalidUrl(name = function.name, url = getInvalidUrl(function.delivery)))
      case _ if projectHasNameConflict                              => Failure(SchemaExtensionParseError(function.name, "Operation name would conflict with existing schema"))
      case _                                                        => Success(MutactionVerificationSuccess())
    })
  }

} 
Example 84
Source File: FindContactPoints.scala    From CM-Well   with Apache License 2.0
package cmwell.analytics.util

import akka.actor.ActorSystem
import akka.http.scaladsl.model.HttpMethods.GET
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.stream.ActorMaterializer
import com.fasterxml.jackson.databind.JsonNode

import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContextExecutor
import scala.util.Try

// TODO: These s/b `Uri`s?
case class ContactPoints(cassandra: String,
                         es: String)

object FindContactPoints {

  
  def es(url: String)
        (implicit system: ActorSystem,
         executionContext: ExecutionContextExecutor,
         actorMaterializer: ActorMaterializer): String = {

    val uri = Uri(url)

    val request = HttpRequest(
      method = GET,
      uri = s"http://${uri.authority.host}:${uri.authority.port}/proc/health?format=json")

    val json: JsonNode = HttpUtil.jsonResult(request, "fetch /proc/health")

    val masterIpAddresses: Seq[String] = json.get("fields").findValue("masters").elements.asScala.map(_.textValue).toSeq

    if (masterIpAddresses.isEmpty)
      throw new RuntimeException("No master node addresses found.")

    // For Elasticsearch, the port is 9201 for a single node, and 9200 for clustered.
    val esPort = if (masterIpAddresses.lengthCompare(1) > 0) "9200" else "9201"

    // All the masters should be accessible, but verify that.
    // A better implementation would keep all the endpoints in the list, and we could fall back to the others
    // if the one we are using disappears.
    val firstAccessibleESEndpoint = masterIpAddresses.find { ipAddress =>
      val request = HttpRequest(
        method = GET,
        uri = s"http://$ipAddress:$esPort")

      Try(HttpUtil.result(request, "probe for accessible es endpoint")).isSuccess
    }

    if (firstAccessibleESEndpoint.isEmpty)
      throw new RuntimeException("No accessible ES endpoint was found.")

    s"${firstAccessibleESEndpoint.get}:$esPort"
  }
} 
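Example 84 only needs the host and port out of the caller-supplied URL; the rest of the request URI is rebuilt. A minimal standalone sketch of that `uri.authority` access pattern (host name and ports are illustrative):

import akka.http.scaladsl.model.Uri

object AuthorityPartsSketch {
  def main(args: Array[String]): Unit = {
    val uri  = Uri("http://cmwell.example.org:9000/some/path?x=1")
    val host = uri.authority.host.address // "cmwell.example.org"
    val port = uri.authority.port         // 9000
    // Point at a different endpoint on the same host, as the example does for /proc/health:
    val health = Uri(s"http://$host:$port/proc/health?format=json")
    println(health)
  }
}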
Example 85
Source File: SimpleDowningSpec.scala    From simple-akka-downing   with Apache License 2.0 5 votes vote down vote up
package com.ajjpj.simpleakkadowning.util

import akka.actor.Props
import akka.cluster.{Cluster, MemberStatus}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.remote.testconductor.RoleName
import akka.remote.testkit.MultiNodeSpec
import akka.remote.transport.ThrottlerTransportAdapter.Direction
import akka.stream.ActorMaterializer
import akka.testkit.ImplicitSender

import scala.concurrent.duration._
import scala.util.control.NonFatal

abstract class SimpleDowningSpec(config: SimpleDowningConfig) extends MultiNodeSpec(config) with STMultiNodeSpec with ImplicitSender {

  def initialParticipants = roles.size

  private var portToNode = Map.empty[Int,RoleName]

  def init(): Unit = {
    if (roles.headOption contains myself) {
      enterBarrier("initialized")
    }
    else {
      val cluster = Cluster(system)
      cluster.joinSeedNodes(seedAddresses)
      system.actorOf(Props(new ClusterHttpInspector(httpPort(myself))), "http-server")

      while (cluster.state.members.count(_.status == MemberStatus.Up) < roles.tail.size) Thread.sleep(100)
      enterBarrier("initialized")
    }

    portToNode = roles.map(r => node(r).address.port.get -> r).toMap
  }

  def httpPort (node: RoleName) = {
    val nodeNo = roles.indexOf(node)
    require(nodeNo > 0)
    8080 + nodeNo
  }

  def seedAddresses = roles.tail.map(node(_).root.address)

  private def httpGetNodes(node: RoleName, path: String): Set[RoleName] = {
    try {
      import system.dispatcher
      implicit val mat = ActorMaterializer()

      val uri = Uri (s"http://localhost:${httpPort (node)}$path")
      val response = Http (system).singleRequest (HttpRequest (uri = uri)).await
      val strict = response.entity.toStrict (10.seconds).await
      strict.data.decodeString ("utf-8") match {
        case s if s.isEmpty => Set.empty
        case s => s.split (' ').map (_.toInt).map (portToNode).toSet
      }
    }
    catch {
      case NonFatal(th) =>
        th.printStackTrace()
        Set.empty
    }
  }

  def upNodesFor(node: RoleName) = httpGetNodes(node, "/cluster-members/up")
  def unreachableNodesFor (node: RoleName) = httpGetNodes(node, "/cluster-members/unreachable")

  
  def createPartition(nodes: RoleName*) = {
    val otherNodes = roles.tail.toSet -- nodes
    for (n1 <- nodes; n2 <- otherNodes) testConductor.blackhole(n1, n2, Direction.Both).await
  }

  def healPartition(): Unit = {
    for (n1 <- roles.tail; n2 <- roles.tail) testConductor.passThrough(n1, n2, Direction.Both).await
  }
} 
Example 86
Source File: FailoverTestGateway.scala    From affinity   with Apache License 2.0 5 votes vote down vote up
package io.amient.affinity.core.cluster

import akka.http.scaladsl.model.HttpMethods.{GET, POST}
import akka.http.scaladsl.model.StatusCodes.{NotFound, OK, SeeOther}
import akka.http.scaladsl.model.{HttpResponse, Uri, headers}
import akka.util.Timeout
import com.typesafe.config.ConfigFactory
import io.amient.affinity.core.actor.GatewayHttp
import io.amient.affinity.core.cluster.FailoverTestPartition.{GetValue, PutValue}
import io.amient.affinity.core.http.RequestMatchers.{HTTP, PATH}
import io.amient.affinity.core.http.{Encoder, HttpInterfaceConf}

import scala.collection.JavaConverters._
import scala.concurrent.duration._
import io.amient.affinity.core.ack
import scala.language.postfixOps

class FailoverTestGateway extends GatewayHttp {

  override val rejectSuspendedHttpRequests = false

  override def listenerConfigs: Seq[HttpInterfaceConf] = List(HttpInterfaceConf(
    ConfigFactory.parseMap(Map("host" -> "127.0.0.1", "port" -> "0").asJava)))

  implicit val executor = scala.concurrent.ExecutionContext.Implicits.global

  implicit val scheduler = context.system.scheduler

  val keyspace1 = keyspace("keyspace1")

  override def handle: Receive = {
    case HTTP(GET, PATH(key), _, response) => handleWith(response) {
      implicit val timeout = Timeout(1 seconds)
      keyspace1 ?! GetValue(key) map {
        _ match {
          case None => HttpResponse(NotFound)
          case Some(value) => Encoder.json(OK, value, gzip = false)
        }
      }
    }

    case HTTP(POST, PATH(key, value), _, response) => handleWith(response) {
      implicit val timeout = Timeout(1 seconds)
      keyspace1 ?! PutValue(key, value) map {
        case _ => HttpResponse(SeeOther, headers = List(headers.Location(Uri(s"/$key"))))
      }
    }
  }
} 
Example 87
Source File: Settings.scala    From nexus-iam   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.iam.config

import akka.actor.{ExtendedActorSystem, Extension, ExtensionId, ExtensionIdProvider}
import akka.http.scaladsl.model.Uri
import ch.epfl.bluebrain.nexus.iam.types.Permission
import com.github.ghik.silencer.silent
import com.typesafe.config.Config
import pureconfig.generic.auto._
import pureconfig.ConvertHelpers._
import pureconfig._


@SuppressWarnings(Array("LooksLikeInterpolatedString"))
class Settings(config: Config) extends Extension {

  @silent // not recognized as used... but it is below
  private implicit val uriConverter: ConfigConvert[Uri] =
    ConfigConvert.viaString[Uri](catchReadError(Uri(_)), _.toString)

  @silent // not recognized as used... but it is below
  private implicit val permissionConverter: ConfigConvert[Permission] =
    ConfigConvert.viaString[Permission](optF(Permission(_)), _.toString)

  val appConfig: AppConfig =
    ConfigSource.fromConfig(config).at("app").loadOrThrow[AppConfig]
}

object Settings extends ExtensionId[Settings] with ExtensionIdProvider {

  override def lookup(): ExtensionId[_ <: Extension] = Settings

  override def createExtension(system: ExtendedActorSystem): Settings = apply(system.settings.config)

  def apply(config: Config): Settings = new Settings(config)
} 
Example 88
Source File: GrpcRequestHelpers.scala    From akka-grpc   with Apache License 2.0 5 votes vote down vote up
package akka.grpc.internal

import akka.actor.ActorSystem
import akka.actor.ClassicActorSystemProvider
import akka.grpc.{ ProtobufSerializer, Trailers }
import akka.grpc.GrpcProtocol.GrpcProtocolWriter
import akka.http.scaladsl.model.HttpEntity.ChunkStreamPart
import akka.stream.scaladsl.Source
import akka.NotUsed
import akka.annotation.InternalApi
import akka.grpc.scaladsl.{ headers, GrpcExceptionHandler }
import akka.http.scaladsl.model.{ HttpEntity, HttpMethods, HttpRequest, Uri }
import io.grpc.Status

import scala.collection.immutable

@InternalApi
object GrpcRequestHelpers {

  def apply[T](
      uri: Uri,
      e: Source[T, NotUsed],
      eHandler: ActorSystem => PartialFunction[Throwable, Trailers] = GrpcExceptionHandler.defaultMapper)(
      implicit m: ProtobufSerializer[T],
      writer: GrpcProtocolWriter,
      system: ClassicActorSystemProvider): HttpRequest =
    request(uri, GrpcEntityHelpers(e, Source.single(GrpcEntityHelpers.trailer(Status.OK)), eHandler))

  private def request[T](uri: Uri, entity: Source[ChunkStreamPart, NotUsed])(
      implicit writer: GrpcProtocolWriter): HttpRequest = {
    HttpRequest(
      uri = uri,
      method = HttpMethods.POST,
      headers = immutable.Seq(
        headers.`Message-Encoding`(writer.messageEncoding.name),
        headers.`Message-Accept-Encoding`(Codecs.supportedCodecs.map(_.name).mkString(","))),
      entity = HttpEntity.Chunked(writer.contentType, entity))
  }

} 
Example 89
Source File: AkkaHttpActionAdapter.scala    From akka-http-pac4j   with Mozilla Public License 2.0 5 votes vote down vote up
package com.stackstate.pac4j.http

import akka.http.scaladsl.model.{HttpEntity, HttpHeader, HttpResponse, StatusCodes, Uri}
import org.pac4j.core.context.HttpConstants
import org.pac4j.core.http.adapter.HttpActionAdapter
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.headers.Location
import akka.http.scaladsl.server.RouteResult
import akka.http.scaladsl.server.RouteResult.Complete
import com.stackstate.pac4j.AkkaHttpWebContext
import org.pac4j.core.exception.http.{
  BadRequestAction,
  ForbiddenAction,
  FoundAction,
  HttpAction,
  NoContentAction,
  OkAction,
  SeeOtherAction,
  TemporaryRedirectAction,
  UnauthorizedAction
}

import scala.concurrent.Future

object AkkaHttpActionAdapter extends HttpActionAdapter[Future[RouteResult], AkkaHttpWebContext] {
  override def adapt(action: HttpAction, context: AkkaHttpWebContext): Future[Complete] = {
    Future.successful(Complete(action match {
      case _: UnauthorizedAction =>
        // XHR requests don't receive a TEMP_REDIRECT but a UNAUTHORIZED. The client can handle this
        // to trigger the proper redirect anyway, but for a correct flow the session cookie must be set
        context.addResponseSessionCookie()
        HttpResponse(Unauthorized)
      case _: BadRequestAction =>
        HttpResponse(BadRequest)
      case _ if action.getCode == HttpConstants.CREATED =>
        HttpResponse(Created)
      case _: ForbiddenAction =>
        HttpResponse(Forbidden)
      case a: FoundAction =>
        context.addResponseSessionCookie()
        HttpResponse(SeeOther, headers = List[HttpHeader](Location(Uri(a.getLocation))))
      case a: SeeOtherAction =>
        context.addResponseSessionCookie()
        HttpResponse(SeeOther, headers = List[HttpHeader](Location(Uri(a.getLocation))))
      case a: OkAction =>
        val contentBytes = a.getContent.getBytes
        val entity = context.getContentType.map(ct => HttpEntity(ct, contentBytes)).getOrElse(HttpEntity(contentBytes))
        HttpResponse(OK, entity = entity)
      case _: NoContentAction =>
        HttpResponse(NoContent)
      case _ if action.getCode == 500 =>
        HttpResponse(InternalServerError)
      case _ =>
        HttpResponse(StatusCodes.getForKey(action.getCode).getOrElse(custom(action.getCode, "")))
    }))
  }
} 
Example 90
Source File: ChatClient.scala    From akka-http-scala-js-websocket-chat   with MIT License 5 votes vote down vote up
package example.akkawschat.cli

import scala.concurrent.Future

import akka.actor.ActorSystem

import akka.stream.scaladsl.{ Keep, Source, Sink, Flow }

import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.ws._

import upickle.default._

import shared.Protocol

object ChatClient {
  def connect[T](endpoint: Uri, handler: Flow[Protocol.Message, String, T])(implicit system: ActorSystem): Future[T] = {
    val wsFlow: Flow[Message, Message, T] =
      Flow[Message]
        .collect {
          case TextMessage.Strict(msg) ⇒ read[Protocol.Message](msg)
        }
        .viaMat(handler)(Keep.right)
        .map(TextMessage(_))

    val (fut, t) = Http().singleWebSocketRequest(WebSocketRequest(endpoint), wsFlow)
    fut.map {
      case v: ValidUpgrade                         ⇒ t
      case InvalidUpgradeResponse(response, cause) ⇒ throw new RuntimeException(s"Connection to chat at $endpoint failed with $cause")
    }(system.dispatcher)
  }

  def connect[T](endpoint: Uri, in: Sink[Protocol.Message, Any], out: Source[String, Any])(implicit system: ActorSystem): Future[Unit] =
    connect(endpoint, Flow.fromSinkAndSource(in, out)).map(_ ⇒ ())(system.dispatcher)

  def connect[T](endpoint: Uri, onMessage: Protocol.Message ⇒ Unit, out: Source[String, Any])(implicit system: ActorSystem): Future[Unit] =
    connect(endpoint, Sink.foreach(onMessage), out)
} 
Example 91
Source File: ChatCLI.scala    From akka-http-scala-js-websocket-chat   with MIT License 5 votes vote down vote up
package example.akkawschat.cli

import akka.actor.ActorSystem

import akka.stream.scaladsl.{ Flow, Source }
import akka.http.scaladsl.model.Uri
import shared.Protocol

import scala.util.{ Failure, Success }

object ChatCLI extends App {
  def promptForName(): String = {
    Console.out.print("What's your name? ")
    Console.out.flush()
    Console.in.readLine()
  }

  val endpointBase = "ws://localhost:8080/chat"
  val name = promptForName()

  val endpoint = Uri(endpointBase).withQuery(Uri.Query("name" -> name))

  implicit val system = ActorSystem()
  import system.dispatcher

  import Console._
  def formatCurrentMembers(members: Seq[String]): String =
    s"(${members.size} people chatting: ${members.map(m ⇒ s"$YELLOW$m$RESET").mkString(", ")})"

  object ChatApp extends ConsoleDSL[String] {
    type State = Seq[String] // current chat members
    def initialState: Seq[String] = Nil

    def run(): Unit = {
      lazy val initialCommands =
        Command.PrintLine("Welcome to the Chat!") ~ readLineAndEmitLoop

      val inputFlow =
        Flow[Protocol.Message]
          .map {
            case Protocol.ChatMessage(sender, message) ⇒ Command.PrintLine(s"$YELLOW$sender$RESET: $message")
            case Protocol.Joined(member, all)          ⇒ Command.PrintLine(s"$YELLOW$member$RESET ${GREEN}joined!$RESET ${formatCurrentMembers(all)}") ~ Command.SetState(all)
            case Protocol.Left(member, all)            ⇒ Command.PrintLine(s"$YELLOW$member$RESET ${RED}left!$RESET ${formatCurrentMembers(all)}") ~ Command.SetState(all)
          }
          // inject initial commands before the commands generated by the server
          .prepend(Source.single(initialCommands))

      val appFlow =
        inputFlow
          .via(consoleHandler)
          .filterNot(_.trim.isEmpty)
          .watchTermination()((_, f) => f onComplete {
            case Success(_) =>
              println("\nFinishing...")
              system.terminate()
            case Failure(e) ⇒
              println(s"Connection to $endpoint failed because of '${e.getMessage}'")
              system.terminate()
          })

      println("Connecting... (Use Ctrl-D to exit.)")
      ChatClient.connect(endpoint, appFlow)
    }

    val basePrompt = s"($name) >"

    lazy val readLineAndEmitLoop: Command =
      readWithParticipantNameCompletion { line ⇒
        Command.Emit(line) ~ readLineAndEmitLoop
      }

    def readWithParticipantNameCompletion(andThen: String ⇒ Command): Command = {
      import Command._

      // NOTE: the nested `simpleMode` / `mentionMode` readers are truncated in this
      // excerpt. They implement @-mention name completion: while the name prefix is
      // empty the plain line reader is used, otherwise the completion reader takes
      // over, and reading starts with `simpleMode("")`.
      ???
    }

  }
  ChatApp.run()
} 
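Example 91 appends the chosen user name to the chat endpoint as a query parameter before the WebSocket is opened. A minimal standalone sketch of `withQuery` (endpoint and name are illustrative):

import akka.http.scaladsl.model.Uri

object QueryAppendSketch {
  def main(args: Array[String]): Unit = {
    val endpoint = Uri("ws://localhost:8080/chat").withQuery(Uri.Query("name" -> "alice"))
    println(endpoint) // ws://localhost:8080/chat?name=alice
  }
}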
Example 92
Source File: UriUtils.scala    From asura   with MIT License 5 votes vote down vote up
package asura.core.http

import java.net.{URLDecoder, URLEncoder}
import java.nio.charset.StandardCharsets

import akka.http.scaladsl.model.Uri
import asura.common.exceptions.InvalidStatusException
import asura.common.util.StringUtils
import asura.core.es.model.HttpCaseRequest
import asura.core.protocols.Protocols
import asura.core.runtime.RuntimeContext
import asura.core.util.StringTemplate

object UriUtils {

  val UTF8 = StandardCharsets.UTF_8.name()

  def toUri(cs: HttpCaseRequest, context: RuntimeContext): Uri = {
    Uri.from(
      scheme = StringUtils.notEmptyElse(cs.request.protocol, Protocols.HTTP),
      host = context.renderSingleMacroAsString(URLDecoder.decode(cs.request.host, UTF8)),
      port = if (cs.request.port < 0 || cs.request.port > 65535) 80 else cs.request.port,
      path = renderPath(URLDecoder.decode(cs.request.urlPath, UTF8), cs, context),
      queryString = buildQueryString(cs, context)
    )
  }

  def mapToQueryString(map: Map[String, Any], context: RuntimeContext = null): String = {
    val sb = StringBuilder.newBuilder
    for ((k, v) <- map) {
      v match {
        case v: String =>
          val renderedValue = if (null != context) context.renderSingleMacroAsString(v) else v
          sb.append(k).append("=").append(URLEncoder.encode(renderedValue, UTF8)).append("&")
        case v: List[_] =>
          v.foreach(i => {
            val value = i.toString
            val renderedValue = if (null != context) context.renderSingleMacroAsString(value) else value
            sb.append(k).append("=").append(URLEncoder.encode(renderedValue, UTF8)).append("&")
          })
      }
    }
    if (sb.nonEmpty) {
      sb.deleteCharAt(sb.length - 1)
    }
    sb.toString
  }

  @throws[InvalidStatusException]("if path template variable not in cs")
  def renderPath(tpl: String, cs: HttpCaseRequest, context: RuntimeContext): String = {
    if (null != cs.request) {
      val params = cs.request.path
      if (null != params && params.nonEmpty) {
        val ctx = params.map(param => param.key -> context.renderSingleMacroAsString(param.value)).toMap
        StringTemplate.uriPathParse(tpl, ctx)
      } else {
        tpl
      }
    } else {
      tpl
    }
  }

  def buildQueryString(cs: HttpCaseRequest, context: RuntimeContext): Option[String] = {
    if (null != cs.request) {
      val params = cs.request.query
      if (null != params && params.nonEmpty) {
        val sb = StringBuilder.newBuilder
        for (param <- params if param.enabled) {
          val key = if (StringUtils.isNotEmpty(param.key)) {
            URLEncoder.encode(param.key, UTF8)
          } else {
            StringUtils.EMPTY
          }
          val value = if (StringUtils.isNotEmpty(param.value)) {
            URLEncoder.encode(context.renderSingleMacroAsString(param.value), UTF8)
          } else {
            StringUtils.EMPTY
          }
          sb.append(key).append("=").append(value).append("&")
        }
        if (sb.nonEmpty) {
          sb.deleteCharAt(sb.length - 1)
        }
        Some(sb.toString)
      } else {
        None
      }
    } else {
      None
    }
  }
} 
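UriUtils.toUri above assembles the request URI from individually rendered pieces via `Uri.from` instead of concatenating strings, which keeps the encoding concerns in one place. A minimal standalone sketch (values are illustrative; note that `queryString` must already be percent-encoded):

import akka.http.scaladsl.model.Uri

object BuildUriSketch {
  def main(args: Array[String]): Unit = {
    val uri = Uri.from(
      scheme      = "http",
      host        = "api.example.com",
      port        = 8080,
      path        = "/users/42",
      queryString = Some("q=a%20b&lang=en") // already percent-encoded, as in the example above
    )
    println(uri) // http://api.example.com:8080/users/42?q=a%20b&lang=en
  }
}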
Example 93
Source File: DefaultHttpEndpointResolver.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.httpclient

import akka.http.scaladsl.model.Uri
import com.typesafe.scalalogging.LazyLogging
import org.squbs.env.Environment
import org.squbs.resolver.Resolver

import scala.util.Try


class DefaultHttpEndpointResolver extends Resolver[HttpEndpoint] with LazyLogging {

  override def name: String = getClass.getName

  override def resolve(name: String, env: Environment): Option[HttpEndpoint] = {
    Try(Uri(name)).toOption match {
      case Some(uri) if uri.scheme == "http" || uri.scheme == "https" => Some(HttpEndpoint(uri))
      case _ =>
        logger.debug(s"Could not resolve to an HttpEndpoint.  Invalid http URI: $name")
        None
    }
  }
} 
Example 94
Source File: DefaultHttpEndpointResolverSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.httpclient

import akka.http.scaladsl.model.Uri
import org.scalatest.OptionValues._
import org.scalatest.{FlatSpecLike, Matchers}
import org.squbs.resolver.ResolverRegistry
import org.squbs.testkit.CustomTestKit

class DefaultHttpEndpointResolverSpec extends CustomTestKit with FlatSpecLike with Matchers {

  ResolverRegistry(system).register(new DefaultHttpEndpointResolver)

  private def resolve(uri: String) = ResolverRegistry(system).resolve[HttpEndpoint](uri).value

  it should "resolve valid http uri string to an HttpEndpoint" in {
    resolve("http://akka.io:80") shouldBe HttpEndpoint(Uri("http://akka.io:80"))
    resolve("http://akka.io") shouldBe HttpEndpoint(Uri("http://akka.io"))
  }

  it should "resolve valid https uri string to an HttpEndpoint" in {
    resolve("https://akka.io:443") shouldBe HttpEndpoint(Uri("https://akka.io:443"))
    resolve("https://akka.io") shouldBe HttpEndpoint(Uri("https://akka.io"))
  }

  it should "not resolve invalid http uri string to an HttpEndpoint" in {
    ResolverRegistry(system).resolve[HttpEndpoint]("invalidUri:") shouldBe empty
    ResolverRegistry(system).resolve[HttpEndpoint]("ftp://akka.io") shouldBe empty
  }

  it should "set the resolver name to the class name" in {
    (new DefaultHttpEndpointResolver).name shouldEqual "org.squbs.httpclient.DefaultHttpEndpointResolver"
  }
} 
Example 95
Source File: package.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs

import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, HttpResponse, Uri}
import akka.stream.ActorMaterializer
import akka.util.ByteString

import scala.concurrent.Future

package object testkit {
  case object TestPing
  case object TestPong

  def entityAsString(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[String] = {
    import system.dispatcher
    get(uri) flatMap extractEntityAsString
  }

  def get(uri: String)(implicit am: ActorMaterializer, system: ActorSystem): Future[HttpResponse] = {
    Http().singleRequest(HttpRequest(uri = Uri(uri)))
  }

  def extractEntityAsString(response: HttpResponse)
                           (implicit am: ActorMaterializer, system: ActorSystem): Future[String] = {
    import system.dispatcher
    response.entity.dataBytes.runFold(ByteString(""))(_ ++ _) map(_.utf8String)
  }
} 
Example 96
Source File: PerpetualStreamMergeHubJSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.stream

import akka.actor.{ActorRef, ActorSystem}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.pattern.ask
import akka.stream.ActorMaterializer
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalatest.{FlatSpecLike, Matchers}
import org.squbs.unicomplex.Timeouts.{awaitMax, _}
import org.squbs.unicomplex._

import scala.collection.mutable
import scala.concurrent.Await

object PerpetualStreamMergeHubJSpec {
  val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath
  val classPaths = Array("JavaPerpetualStreamMergeHubSpec") map (dummyJarsDir + "/" + _)

  val config = ConfigFactory.parseString(
    s"""
       |squbs {
       |  actorsystem-name = JavaPerpetualStreamMergeHubSpec
       |  ${JMX.prefixConfig} = true
       |}
       |default-listener.bind-port = 0
      """.stripMargin
  )

  val boot = UnicomplexBoot(config)
    .createUsing {
      (name, config) => ActorSystem(name, config)
    }
    .scanComponents(classPaths)
    .start()
}

class PerpetualStreamMergeHubJSpec extends TestKit(PerpetualStreamMergeHubJSpec.boot.actorSystem)
  with FlatSpecLike with Matchers  {

  val portBindings = Await.result((Unicomplex(system).uniActor ? PortBindings).mapTo[Map[String, Int]], awaitMax)
  val psActorName = "/user/JavaPerpetualStreamMergeHubSpec/perpetualStreamWithMergeHub"
  val actorRef = Await.result((system.actorSelection(psActorName) ? RetrieveMyMessageStorageActorRef).mapTo[ActorRef],
    awaitMax)
  val port = portBindings("default-listener")


  it should "connect streams with mergehub" in {

    implicit val ac = ActorMaterializer()
    Http().singleRequest(HttpRequest(uri = Uri(s"http://127.0.0.1:$port/mergehub"), entity = "10"))
    Http().singleRequest(HttpRequest(uri = Uri(s"http://127.0.0.1:$port/mergehub"), entity = "11"))

    awaitAssert {
      val messages = Await.result((actorRef ? RetrieveMyMessages).mapTo[mutable.Set[MyMessage]], awaitMax)
      messages should have size 2
      messages should contain(MyMessage(10))
      messages should contain(MyMessage(11))
    }
  }
} 
Example 97
Source File: CubeActorErrorStatesSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.unicomplex

import javax.management.ObjectName
import javax.management.openmbean.CompositeData

import akka.actor.{Actor, ActorSystem}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.pattern._
import akka.stream.ActorMaterializer
import akka.testkit.{ImplicitSender, TestKit}
import com.typesafe.config.ConfigFactory
import org.scalatest.OptionValues._
import org.scalatest.{BeforeAndAfterAll, FlatSpecLike, Matchers}
import org.squbs.lifecycle.GracefulStop
import org.squbs.unicomplex.Timeouts._

import scala.concurrent.Await

object CubeActorErrorStatesSpec{

  val classPaths = Array(getClass.getClassLoader.getResource("classpaths/CubeActorErrorStates").getPath)

  val config = ConfigFactory.parseString(
    s"""
       |default-listener.bind-port = 0
       |squbs {
       |  actorsystem-name = cubeActorErrorStatesSpec
       |  ${JMX.prefixConfig} = true
       |}
    """.stripMargin
  )

  val boot = UnicomplexBoot(config)
    .createUsing {(name, config) => ActorSystem(name, config)}
    .scanComponents(classPaths)
    .initExtensions.start()
}

class CubeActorErrorStatesSpec extends TestKit(CubeActorErrorStatesSpec.boot.actorSystem)
  with FlatSpecLike with Matchers with ImplicitSender with BeforeAndAfterAll {

  val portBindings = Await.result((Unicomplex(system).uniActor ? PortBindings).mapTo[Map[String, Int]], awaitMax)
  val port = portBindings("default-listener")


  implicit val am = ActorMaterializer()

  override def afterAll(): Unit = {
    Unicomplex(system).uniActor ! GracefulStop
  }

  "Route" should "handle request with empty web-context" in {
    Http().singleRequest(HttpRequest(uri = Uri(s"http://127.0.0.1:$port/test2?msg=1")))
    Thread.sleep(100)
    Http().singleRequest(HttpRequest(uri = Uri(s"http://127.0.0.1:$port/test1?msg=1")))
    Thread.sleep(100)
    Http().singleRequest(HttpRequest(uri = Uri(s"http://127.0.0.1:$port/test1?msg=2")))
    Thread.sleep(1000) // wait the agent get refreshed
    import org.squbs.unicomplex.JMX._
    val errorStates = get(new ObjectName(prefix(system) + cubeStateName + "CubeActorErrorStates"), "ActorErrorStates")
      .asInstanceOf[Array[CompositeData]]
    errorStates should have length 2
    val state1 = errorStates.find(_.get("actorPath") == "/user/CubeActorErrorStates/test1-CubeActorTest-handler").value
    state1.get("errorCount") shouldBe 2
    state1.get("latestException").asInstanceOf[String] should include ("test1:2")
    val state2 = errorStates.find(_.get("actorPath") == "/user/CubeActorErrorStates/test2-CubeActorTest-handler").value
    state2.get("errorCount") shouldBe 1
    state2.get("latestException").asInstanceOf[String] should include ("test2:1")
  }
}

class CubeActorTest extends Actor {
  override def receive: Receive = {
    case r: HttpRequest =>
      val msg = r.uri.query().get("msg").getOrElse("")
      throw new RuntimeException(s"${r.uri.path}:$msg")
  }
} 
Example 98
Source File: RegisterContextSpec.scala    From squbs   with Apache License 2.0 5 votes vote down vote up
package org.squbs.unicomplex

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path
import akka.http.scaladsl.model.Uri.Path.{Empty, Segment, Slash}
import org.scalatest.{FlatSpecLike, Matchers}

class RegisterContextSpec extends FlatSpecLike with Matchers {


  "Path matching" should "work" in {
    val emptyPath = Path("")
    emptyPath shouldBe empty
    emptyPath should have length 0
    emptyPath.charCount should be (0)
    println(emptyPath.getClass.getName)
    emptyPath should be (Empty)
    emptyPath should not be 'startsWithSegment
    emptyPath should not be 'startsWithSlash
    emptyPath.startsWith(Empty) should be (true)

    val root = Path("/")
    root should not be empty
    root should have length 1
    root.charCount should be (1)
    println(root.getClass.getName)
    root shouldBe a [Slash]
    root should not be 'startsWithSegment
    root shouldBe 'startsWithSlash
    root.startsWith(Empty) should be (true)
    root.head should be ('/')
    root.tail should be (Empty)

    val single = Path("/abc")
    single should not be empty
    single should have length 2
    single.charCount should be (4)
    println(single.getClass.getName)
    single shouldBe a[Slash]
    single should not be 'startsWithSegment
    single shouldBe 'startsWithSlash
    single.startsWith(Path("/")) should be (true)
    single.startsWith(Path("")) should be (true)
    single.startsWith(Path("abc")) should be (false)
    single.head should be ('/')
    single.tail should be (Path("abc"))

    val simple = Path("abc")
    simple should not be empty
    simple should have length 1
    simple.charCount should be (3)
    println(simple.getClass.getName)
    simple shouldBe a[Segment]
    simple shouldBe 'startsWithSegment
    simple should not be 'startsWithSlash
    simple.startsWith(Path("/")) should be (false)
    simple.startsWith(Path("")) should be (true)
    simple.startsWith(Path("abc")) should be (true)
    simple.head should be ("abc")
    simple.tail should be (Empty)

    val multi = Path("abc/def")
    multi should not be empty
    multi should have length 3
    multi.charCount should be (7)
    println(multi.getClass.getName)
    multi shouldBe a[Segment]
    multi shouldBe 'startsWithSegment
    multi should not be 'startsWithSlash
    multi.startsWith(Path("/")) should be (false)
    multi.startsWith(Path("")) should be (true)
    multi.startsWith(Path("abc")) should be (true)
    multi.head should be ("abc")
    multi.tail shouldBe a [Slash]
    multi.startsWith(Path("abc/de")) should be (true)

  }

  "request path matching" should "work" in {

    Uri("http://www.ebay.com").path should not be 'startsWithSlash
    Uri("http://www.ebay.com").path should not be 'startsWithSegment
    Uri("http://www.ebay.com").path.startsWith(Empty) should be (true)
    Uri("http://www.ebay.com/").path shouldBe 'startsWithSlash
    Uri("http://www.ebay.com/").path should not be 'startsWithSegment
    Uri("http://www.ebay.com/").path.startsWith(Empty) should be (true)
    Uri("http://www.ebay.com").path should be (Path(""))
    Uri("http://www.ebay.com/").path should be (Path("/"))
    Uri("http://127.0.0.1:8080/abc").path shouldBe 'startsWithSlash
    Uri("http://www.ebay.com/").path.startsWith(Path("")) should be (true)
    Uri("http://www.ebay.com").path.startsWith(Path("")) should be (true)
    Uri("http://www.ebay.com/abc").path.startsWith(Path("")) should be (true)
    Uri("http://www.ebay.com/abc").path.tail.startsWith(Path("")) should be (true)
    Uri("http://www.ebay.com/abc").path.tail.startsWith(Path("abc")) should be (true)
    Uri("http://www.ebay.com/abc/def").path.tail.startsWith(Path("abc")) should be (true)
    Uri("http://www.ebay.com/abc/def").path.tail.startsWith(Path("abc/def")) should be (true)

  }
} 
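Example 98 exercises `Uri.Path` prefix matching, which is what lets a web context such as `abc` be mounted and matched against incoming request paths. A minimal standalone sketch of the same checks (URL is illustrative):

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Path

object PathMatchingSketch {
  def main(args: Array[String]): Unit = {
    val path = Uri("http://example.com/webctx/resource").path
    println(path.startsWithSlash)                 // true
    println(path.tail.startsWith(Path("webctx"))) // true: tail drops the leading slash
    println(path.tail.startsWith(Path("web")))    // true: startsWith matches character-wise, as Example 98 shows
  }
}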
Example 99
Source File: WSSecurityProperties.scala    From scala-loci   with Apache License 2.0 5 votes vote down vote up
package loci
package communicator
package ws.akka

import java.security.cert.Certificate

import akka.http.scaladsl.model.{HttpMessage, HttpRequest, HttpResponse, Uri}
import akka.http.scaladsl.model.headers._
import akka.http.scaladsl.model.ws.WebSocketRequest

private case class WSSecurityProperties(
  isAuthenticated: Boolean, isProtected: Boolean, isEncrypted: Boolean,
  certificates: Seq[Certificate])

private object WSSecurityProperties {
  final val HTTPS = "https"
  final val WSS = "wss"
  final val NoProtocol = "NONE" // TLSv1, etc.
  final val NoCipher = "SSL_NULL_WITH_NULL_NULL" // see RFC2246, RFC3268, etc.
  final val NoEncryptionFragment = "WITH_NULL"

  def apply(request: WebSocketRequest, response: HttpResponse,
      authenticated: Boolean): WSSecurityProperties =
    create(request.uri, response, authenticated)

  def apply(request: HttpRequest, authenticated: Boolean): WSSecurityProperties =
    create(request.uri, request, authenticated)

  private def create(uri: Uri, message: HttpMessage, authenticated: Boolean)
    : WSSecurityProperties = {

    val tls = uri.scheme == HTTPS || uri.scheme == WSS

    val properties = message.header[`Tls-Session-Info`] map { info =>
      val protocol = info.session.getProtocol
      val cipher = info.session.getCipherSuite

      val tls = protocol != NoProtocol && cipher != NoCipher

      val certificates = info.peerCertificates
      val isAuthenticated = tls && certificates.nonEmpty
      val isProtected = tls
      val isEncrypted = tls && !(cipher contains NoEncryptionFragment)

      WSSecurityProperties(authenticated || isAuthenticated, isProtected, isEncrypted, certificates)
    }

    properties getOrElse { WSSecurityProperties(authenticated, tls, tls, Seq.empty) }
  }
} 
Example 100
Source File: SseConnector.scala    From vamp   with Apache License 2.0 5 votes vote down vote up
package io.vamp.common.http

import akka.Done
import akka.actor.ActorSystem
import akka.event.LoggingAdapter
import akka.http.scaladsl.model.HttpHeader.ParsingResult.Ok
import akka.http.scaladsl.model.sse.ServerSentEvent
import akka.http.scaladsl.model.{ HttpHeader, HttpRequest, HttpResponse, Uri }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Sink, Source }
import io.vamp.common.http.EventSource.EventSource

import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration.{ FiniteDuration, _ }
import scala.language.postfixOps
import scala.util.{ Failure, Success }

private case class SseConnectionConfig(url: String, headers: List[(String, String)], tlsCheck: Boolean)

private case class SseConnectionEntryValue(source: EventSource)

trait SseListener {
  def onEvent(event: ServerSentEvent): Unit
}

object SseConnector {

  private val retryDelay: FiniteDuration = 5 second
  private val listeners: mutable.Map[SseConnectionConfig, Set[SseListener]] = mutable.Map()
  private val connections: mutable.Map[SseConnectionConfig, Future[Done]] = mutable.Map()

  def open(url: String, headers: List[(String, String)] = Nil, tlsCheck: Boolean)(listener: SseListener)(implicit system: ActorSystem, logger: LoggingAdapter): Unit = synchronized {
    val config = SseConnectionConfig(url, headers, tlsCheck)
    implicit val materializer: ActorMaterializer = ActorMaterializer()

    listeners.update(config, listeners.getOrElse(config, Set()) + listener)

    connections.getOrElseUpdate(config, {
      logger.info(s"Opening SSE connection: $url")
      EventSource(Uri(url), send(config), None, retryDelay).takeWhile { event ⇒
        event.eventType.foreach(t ⇒ logger.info(s"SSE: $t"))
        val receivers = listeners.getOrElse(config, Set())
        receivers.foreach(_.onEvent(event))
        val continue = receivers.nonEmpty
        if (!continue) logger.info(s"Closing SSE connection: $url")
        continue
      }.runWith(Sink.ignore)
    })
  }

  def close(listener: SseListener): Unit = synchronized {
    listeners.transform((_, v) ⇒ v - listener)
  }

  private def send(config: SseConnectionConfig)(request: HttpRequest)(implicit system: ActorSystem, materializer: ActorMaterializer): Future[HttpResponse] = {
    val httpHeaders = config.headers.map { case (k, v) ⇒ HttpHeader.parse(k, v) } collect { case Ok(h, _) ⇒ h } filterNot request.headers.contains
    Source.single(request.withHeaders(request.headers ++ httpHeaders) → 1).via(HttpClient.pool[Any](config.url, config.tlsCheck)).map {
      case (Success(response: HttpResponse), _) ⇒ response
      case (Failure(f), _)                      ⇒ throw new RuntimeException(f.getMessage)
    }.runWith(Sink.head)
  }
} 
Example 101
Source File: QueryResultEncoder.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.search

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query
import ch.epfl.bluebrain.nexus.commons.search.QueryResult.{ScoredQueryResult, UnscoredQueryResult}
import ch.epfl.bluebrain.nexus.commons.search.QueryResults.{ScoredQueryResults, UnscoredQueryResults}
import ch.epfl.bluebrain.nexus.commons.search.{FromPagination, QueryResult, QueryResults}
import ch.epfl.bluebrain.nexus.kg.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.kg.config.Contexts.{resourceCtxUri, searchCtxUri}
import ch.epfl.bluebrain.nexus.kg.config.Vocabulary.nxv
import ch.epfl.bluebrain.nexus.kg.directives.QueryDirectives.{after, from, size}
import ch.epfl.bluebrain.nexus.kg.indexing.SparqlLink
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.syntax._
import io.circe.{Encoder, Json}
trait LowPriorityQueryResultsEncoder {

  implicit def qrsEncoderLowPrio[A: Encoder]: Encoder[QueryResults[A]] =
    Encoder.instance(qrsEncoderJsonLinks[A](None).apply(_))

  private implicit val uriEncoder: Encoder[Uri] = Encoder.encodeString.contramap(_.toString)

  protected def qrsEncoderJsonLinks[A: Encoder](next: Option[Uri]): Encoder[QueryResults[A]] = {
    implicit def qrEncoderJson: Encoder[QueryResult[A]] = Encoder.instance {
      case UnscoredQueryResult(v) => v.asJson.removeKeys(nxv.original_source.prefix)
      case ScoredQueryResult(score, v) =>
        v.asJson.removeKeys(nxv.original_source.prefix) deepMerge
          Json.obj(nxv.score.prefix -> Json.fromFloatOrNull(score))
    }
    def json(total: Long, list: List[QueryResult[A]]): Json =
      Json
        .obj(nxv.total.prefix -> Json.fromLong(total), nxv.results.prefix -> Json.arr(list.map(qrEncoderJson(_)): _*))
        .addContext(searchCtxUri)
        .addContext(resourceCtxUri)

    Encoder.instance {
      case UnscoredQueryResults(total, list, _) =>
        json(total, list) deepMerge Json.obj(nxv.next.prefix -> next.asJson)
      case ScoredQueryResults(total, maxScore, list, _) =>
        json(total, list) deepMerge
          Json.obj(nxv.maxScore.prefix -> maxScore.asJson, nxv.next.prefix -> next.asJson)
    }
  }
}

object QueryResultEncoder extends LowPriorityQueryResultsEncoder {

  implicit def qrsEncoderJson(implicit searchUri: Uri, http: HttpConfig): Encoder[QueryResults[Json]] =
    Encoder.instance { results =>
      val nextLink = results.token.flatMap(next(searchUri, _))
      qrsEncoderJsonLinks[Json](nextLink).apply(results)
    }

  implicit def qrsEncoderJson(
      implicit searchUri: Uri,
      pagination: FromPagination,
      http: HttpConfig
  ): Encoder[QueryResults[SparqlLink]] =
    Encoder.instance { results =>
      val nextLink = next(searchUri, results.total, pagination)
      qrsEncoderJsonLinks[SparqlLink](nextLink).apply(results)
    }

  private def next(current: Uri, total: Long, pagination: FromPagination)(implicit http: HttpConfig): Option[Uri] = {
    val nextFrom = pagination.from + pagination.size
    if (nextFrom < total.toInt) {
      val params = current.query().toMap + (from -> nextFrom.toString) + (size -> pagination.size.toString)
      Some(toPublic(current).withQuery(Query(params)))
    } else None
  }

  private def next(current: Uri, afterToken: String)(implicit http: HttpConfig): Option[Uri] =
    current.query().get(after) match {
      case Some(`afterToken`) => None
      case _ =>
        val params = current.query().toMap + (after -> afterToken) - from
        Some(toPublic(current).withQuery(Query(params)))
    }

  private def toPublic(uri: Uri)(implicit http: HttpConfig): Uri =
    uri.copy(scheme = http.publicUri.scheme, authority = http.publicUri.authority)
} 
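Both `next` helpers in Example 101 rewrite the query of the current request URI to produce a pagination link. A minimal standalone sketch of that read-modify-write pattern on `Uri.Query` (host and parameter names are illustrative):

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query

object NextPageLinkSketch {
  def main(args: Array[String]): Unit = {
    val current = Uri("http://nexus.example.com/resources?from=10&size=10")
    val params  = current.query().toMap + ("from" -> "20") // bump the offset, keep everything else
    val next    = current.withQuery(Query(params))
    println(next) // e.g. http://nexus.example.com/resources?from=20&size=10 (Map ordering is not guaranteed)
  }
}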
Example 102
Source File: QueryResultEncoderSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.search

import java.time.Instant
import java.util.regex.Pattern.quote

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query
import ch.epfl.bluebrain.nexus.commons.circe.syntax._
import ch.epfl.bluebrain.nexus.commons.search.QueryResult.{ScoredQueryResult, UnscoredQueryResult}
import ch.epfl.bluebrain.nexus.commons.search.QueryResults
import ch.epfl.bluebrain.nexus.commons.search.QueryResults.{ScoredQueryResults, UnscoredQueryResults}
import ch.epfl.bluebrain.nexus.commons.test.{Randomness, Resources}
import ch.epfl.bluebrain.nexus.kg.config.AppConfig
import ch.epfl.bluebrain.nexus.kg.config.AppConfig.HttpConfig
import ch.epfl.bluebrain.nexus.kg.search.QueryResultEncoder._
import ch.epfl.bluebrain.nexus.rdf.Iri.AbsoluteIri
import ch.epfl.bluebrain.nexus.rdf.implicits._
import io.circe.Json
import io.circe.syntax._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class QueryResultEncoderSpec extends AnyWordSpecLike with Matchers with Resources with Randomness {

  implicit val orderedKeys = AppConfig.orderedKeys
  val org                  = genString()
  val proj                 = genString()
  val schema               = genString()
  val now                  = Instant.now()
  implicit val http        = HttpConfig("", 0, "v1", "http://nexus.com")
  implicit val uri         = Uri(s"http://nexus.com/resources/$org/$proj/$schema?type=someType&from=10&size=10")
  val before               = now.minusSeconds(60)

  "QueryResultsEncoder" should {
    def json(id: AbsoluteIri, createdAt: Instant): Json =
      jsonContentOf(
        "/resources/es-metadata.json",
        Map(
          quote("{id}")      -> id.asString,
          quote("{org}")     -> org,
          quote("{proj}")    -> proj,
          quote("{schema}")  -> schema,
          quote("{instant}") -> createdAt.toString
        )
      ) deepMerge Json.obj("_original_source" -> Json.fromString(Json.obj("k" -> Json.fromInt(1)).noSpaces))

    "encode ScoredQueryResults" in {
      val results: QueryResults[Json] = ScoredQueryResults[Json](
        3,
        0.3f,
        List(
          ScoredQueryResult(0.3f, json(url"http://nexus.com/result1", before)),
          ScoredQueryResult(0.2f, json(url"http://nexus.com/result2", before)),
          ScoredQueryResult(0.1f, json(url"http://nexus.com/result3", now))
        ),
        sort(now)
      )

      results.asJson.sortKeys shouldEqual jsonContentOf(
        "/search/scored-query-results.json",
        Map(
          quote("{org}")                -> org,
          quote("{proj}")               -> proj,
          quote("{schema}")             -> schema,
          quote("{before}")             -> before.toString,
          quote("{lastElementCreated}") -> now.toString,
          quote("{after}")              -> after(now)
        )
      )
    }
    "encode UnscoredQueryResults" in {
      val results: QueryResults[Json] = UnscoredQueryResults[Json](
        3,
        List(
          UnscoredQueryResult(json(url"http://nexus.com/result1", before)),
          UnscoredQueryResult(json(url"http://nexus.com/result2", before)),
          UnscoredQueryResult(json(url"http://nexus.com/result3", now))
        ),
        sort(now)
      )

      results.asJson.sortKeys shouldEqual jsonContentOf(
        "/search/unscored-query-results.json",
        Map(
          quote("{org}")                -> org,
          quote("{proj}")               -> proj,
          quote("{schema}")             -> schema,
          quote("{before}")             -> before.toString,
          quote("{lastElementCreated}") -> now.toString,
          quote("{after}")              -> after(now)
        )
      )

    }
  }

  private def sort(instant: Instant): Option[String] = Some(Json.arr(Json.fromString(instant.toString)).noSpaces)
  private def after(instant: Instant): String =
    Query("after" -> List(Json.fromString(instant.toString)).asJson.noSpaces).toString()

} 
Example 103
Source File: DiskStorageOperationsSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths

import akka.http.scaladsl.model.{ContentTypes, Uri}
import cats.effect.IO
import ch.epfl.bluebrain.nexus.commons.test._
import ch.epfl.bluebrain.nexus.commons.test.io.IOEitherValues
import ch.epfl.bluebrain.nexus.kg.config.AppConfig._
import ch.epfl.bluebrain.nexus.kg.config.Settings
import ch.epfl.bluebrain.nexus.kg.resources.file.File.FileDescription
import ch.epfl.bluebrain.nexus.kg.resources.Id
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.{KgError, TestHelper}
import ch.epfl.bluebrain.nexus.sourcing.RetryStrategyConfig
import org.mockito.IdiomaticMockito
import org.scalatest.{BeforeAndAfter, OptionValues}
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

import scala.concurrent.duration._

class DiskStorageOperationsSpec
    extends ActorSystemFixture("DiskStorageOperationsSpec")
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfter
    with IdiomaticMockito
    with IOEitherValues
    with Resources
    with TestHelper
    with OptionValues {

  private val appConfig = Settings(system).appConfig

  private implicit val sc: StorageConfig = appConfig.storage.copy(
    DiskStorageConfig(Paths.get("/tmp"), "SHA-256", read, write, false, 1024L),
    RemoteDiskStorageConfig("http://example.com", "v1", None, "SHA-256", read, write, true, 1024L),
    S3StorageConfig("MD5", read, write, true, 1024L),
    "password",
    "salt",
    RetryStrategyConfig("linear", 300.millis, 5.minutes, 100, 1.second)
  )

  private val project  = ProjectRef(genUUID)
  private val storage  = Storage.DiskStorage.default(project)
  private val resId    = Id(storage.ref, genIri)
  private val fileDesc = FileDescription("my file.txt", ContentTypes.`text/plain(UTF-8)`)

  "DiskStorageOperations" should {

    "verify when the storage exists" in {
      val verify = new DiskStorageOperations.VerifyDiskStorage[IO](storage)
      verify.apply.accepted
    }

    "save and fetch files" in {
      val save   = new DiskStorageOperations.SaveDiskFile[IO](storage)
      val fetch  = new DiskStorageOperations.FetchDiskFile[IO]()
      val source = genSource

      val attr = save.apply(resId, fileDesc, source).ioValue
      attr.bytes shouldEqual 16L
      attr.filename shouldEqual fileDesc.filename
      attr.mediaType shouldEqual fileDesc.mediaType.value
      attr.location shouldEqual Uri(s"file:///tmp/${mangle(project, attr.uuid, "my%20file.txt")}")
      attr.path shouldEqual attr.location.path.tail.tail.tail
      val fetched = fetch.apply(attr).ioValue

      consume(source) shouldEqual consume(fetched)
    }

    "not link files" in {
      val link = new DiskStorageOperations.LinkDiskFile[IO]()
      link.apply(resId, fileDesc, Uri.Path("/foo")).failed[KgError] shouldEqual KgError.UnsupportedOperation
    }
  }

} 
Example 104
Source File: RemoteDiskStorageOperationsSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.storage

import akka.http.scaladsl.model.ContentTypes._
import akka.http.scaladsl.model.Uri
import cats.effect.IO
import ch.epfl.bluebrain.nexus.commons.test.io.IOEitherValues
import ch.epfl.bluebrain.nexus.commons.test.{ActorSystemFixture, Resources}
import ch.epfl.bluebrain.nexus.iam.client.types.{AuthToken, Permission}
import ch.epfl.bluebrain.nexus.kg.TestHelper
import ch.epfl.bluebrain.nexus.kg.resources.file.File.{Digest, FileAttributes, FileDescription}
import ch.epfl.bluebrain.nexus.kg.resources.Id
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.storage.Storage.RemoteDiskStorage
import ch.epfl.bluebrain.nexus.storage.client.StorageClient
import ch.epfl.bluebrain.nexus.storage.client.types.FileAttributes.{Digest => StorageDigest}
import ch.epfl.bluebrain.nexus.storage.client.types.{FileAttributes => StorageFileAttributes}
import org.mockito.{IdiomaticMockito, Mockito}
import org.scalatest.BeforeAndAfter
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpecLike

class RemoteDiskStorageOperationsSpec
    extends ActorSystemFixture("RemoteDiskStorageOperationsSpec")
    with AnyWordSpecLike
    with Matchers
    with BeforeAndAfter
    with IdiomaticMockito
    with IOEitherValues
    with Resources
    with TestHelper {

  private val endpoint = "http://nexus.example.com/v1"

  sealed trait Ctx {
    val cred                              = genString()
    implicit val token: Option[AuthToken] = Some(AuthToken(cred))
    val path                              = Uri.Path(s"${genString()}/${genString()}")
    // format: off
    val storage = RemoteDiskStorage(ProjectRef(genUUID), genIri, 1L, false, false, "SHA-256", endpoint, Some(cred), genString(), Permission.unsafe(genString()), Permission.unsafe(genString()), 1024L)
    val attributes = FileAttributes(s"$endpoint/${storage.folder}/$path", path, s"${genString()}.json", `application/json`, 12L, Digest("SHA-256", genString()))
    // format: on
  }

  private val client = mock[StorageClient[IO]]

  before {
    Mockito.reset(client)
  }

  "RemoteDiskStorageOperations" should {

    "verify when storage exists" in new Ctx {
      client.exists(storage.folder) shouldReturn IO(true)
      val verify = new RemoteDiskStorageOperations.Verify[IO](storage, client)
      verify.apply.accepted
    }

    "verify when storage does not exists" in new Ctx {
      client.exists(storage.folder) shouldReturn IO(false)
      val verify = new RemoteDiskStorageOperations.Verify[IO](storage, client)
      verify.apply
        .rejected[String] shouldEqual s"Folder '${storage.folder}' does not exists on the endpoint '${storage.endpoint}'"
    }

    "fetch file" in new Ctx {
      val source = genSource
      client.getFile(storage.folder, path) shouldReturn IO(source)
      val fetch        = new RemoteDiskStorageOperations.Fetch[IO](storage, client)
      val resultSource = fetch.apply(attributes).ioValue
      consume(resultSource) shouldEqual consume(source)
    }

    "link file" in new Ctx {
      val id               = Id(storage.ref, genIri)
      val sourcePath       = Uri.Path(s"${genString()}/${genString()}")
      val destRelativePath = Uri.Path(mangle(storage.ref, attributes.uuid, attributes.filename))
      client.moveFile(storage.folder, sourcePath, destRelativePath) shouldReturn
        IO(
          StorageFileAttributes(
            attributes.location,
            attributes.bytes,
            StorageDigest(attributes.digest.algorithm, attributes.digest.value),
            attributes.mediaType
          )
        )
      val link = new RemoteDiskStorageOperations.Link[IO](storage, client)
      link
        .apply(id, FileDescription(attributes.uuid, attributes.filename, Some(attributes.mediaType)), sourcePath)
        .ioValue shouldEqual attributes.copy(path = destRelativePath)
    }
  }
} 
Example 105
Source File: PackageObjectSpec.scala    From nexus-kg   with Apache License 2.0 5 votes vote down vote up
package ch.epfl.bluebrain.nexus.kg.storage

import java.nio.file.Paths
import java.util.UUID

import akka.actor.ActorSystem
import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.FileIO
import ch.epfl.bluebrain.nexus.kg.resources.ProjectIdentifier.ProjectRef
import ch.epfl.bluebrain.nexus.kg.resources.file.File.Digest
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers

class PackageObjectSpec extends AnyFlatSpecLike with Matchers with ScalaFutures {

  "uriToPath" should "convert an Akka Uri that represents a valid file path to a Java Path" in {
    uriToPath("file:///some/path/my%20file.txt") shouldEqual Some(Paths.get("/some/path/my file.txt"))
    uriToPath("s3://some/path") shouldEqual None
    uriToPath("foo") shouldEqual None
  }

  "pathToUri" should "convert a Java Path to an Akka Uri" in {
    pathToUri(Paths.get("/some/path/my file.txt")) shouldEqual Uri("file:///some/path/my%20file.txt")
  }

  "mangle" should "generate a properly mangled path given a file project and UUID" in {
    val projUuid = UUID.fromString("4947db1e-33d8-462b-9754-3e8ae74fcd4e")
    val fileUuid = UUID.fromString("b1d7cda2-1ec0-40d2-b12e-3baf4895f7d7")
    mangle(ProjectRef(projUuid), fileUuid, "my file.jpg") shouldEqual
      "4947db1e-33d8-462b-9754-3e8ae74fcd4e/b/1/d/7/c/d/a/2/my file.jpg"
  }

  "digest" should "properly compute the hash of a given input" in {
    implicit val as: ActorSystem = ActorSystem()

    val filePath = "/storage/s3.json"
    val path     = Paths.get(getClass.getResource(filePath).toURI)
    val input    = FileIO.fromPath(path)
    val algo     = "SHA-256"

    input.runWith(digestSink(algo)(as.dispatcher)).futureValue shouldEqual Digest(
      algo,
      "5602c497e51680bef1f3120b1d6f65d480555002a3290029f8178932e8f4801a"
    )
  }
} 
Example 106
Source File: UserRepository.scala    From gabbler   with Apache License 2.0 5 votes vote down vote up
package de.heikoseeberger.gabbler.chat

import akka.actor.{ ActorLogging, Props }
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri
import akka.persistence.{ PersistentActor, RecoveryCompleted }
import akka.stream.ActorMaterializer
import akka.stream.alpakka.sse.scaladsl.EventSource
import de.heikoseeberger.akkasse.ServerSentEvent
import io.circe.parser.decode

object UserRepository {

  private sealed trait UserEvent

  final case class FindUserByUsername(username: String)
  final case class UsernameUnknown(username: String)

  private final case class AddUser(id: Long, username: String, nickname: String)
  private final case class UserAdded(eventId: String, user: User)

  private final case class RemoveUser(id: Long)
  private final case class UserRemoved(eventId: String, user: User)

  final case class User(id: Long, username: String, nickname: String)

  final val Name = "user-repository"

  def apply(userEventsEndpoint: Uri): Props =
    Props(new UserRepository(userEventsEndpoint))
}

final class UserRepository(userEventsEndpoint: Uri) extends PersistentActor with ActorLogging {
  import UserRepository._
  import io.circe.generic.auto._

  override val persistenceId = Name

  private implicit val mat = ActorMaterializer()

  private var users = Map.empty[String, User]

  private var lastEventId = Option.empty[String]

  override def receiveCommand = {
    case FindUserByUsername(n)               => handleFindUserByUsername(n)
    case (eventId: String, AddUser(i, u, n)) => handleAddUser(eventId, i, u, n)
    case (eventId: String, RemoveUser(i))    => handleRemoveUser(eventId, i)
  }

  override def receiveRecover = {
    case RecoveryCompleted =>
      userEvents(lastEventId).runForeach(self ! _)

    case UserAdded(eventId, user) =>
      lastEventId = Some(eventId)
      users += user.username -> user
      log.info("Added user with username {}", user.username)

    case UserRemoved(eventId, user) =>
      lastEventId = Some(eventId)
      users -= user.username
      log.info("Removed user with username {}", user.username)
  }

  private def handleFindUserByUsername(username: String) =
    users.get(username) match {
      case Some(user) => sender() ! user
      case None       => sender() ! UsernameUnknown(username)
    }

  private def handleAddUser(eventId: String, id: Long, username: String, nickname: String) =
    persist(UserAdded(eventId, User(id, username, nickname)))(receiveRecover)

  private def handleRemoveUser(eventId: String, id: Long) =
    users.values.find(_.id == id) match {
      case Some(user) => persist(UserRemoved(eventId, user))(receiveRecover)
      case None       => log.warning("User with id {} does not exist!", id)
    }

  private def userEvents(lastEventId: Option[String]) =
    EventSource(userEventsEndpoint, Http(context.system).singleRequest(_), lastEventId)
      .collect {
        case ServerSentEvent(Some(data), Some("user-added"), Some(eventId), _) =>
          eventId -> decode[AddUser](data)
        case ServerSentEvent(Some(data), Some("user-removed"), Some(eventId), _) =>
          eventId -> decode[RemoveUser](data)
      }
      .collect { case (eventId, Right(userEvent)) => eventId -> userEvent }
} 
Example 107
Source File: ChatApp.scala    From gabbler   with Apache License 2.0 5 votes vote down vote up
package de.heikoseeberger.gabbler.chat

import akka.NotUsed
import akka.actor.{ Actor, ActorLogging, ActorSystem, Props, SupervisorStrategy, Terminated }
import akka.cluster.Cluster
import akka.cluster.singleton.{
  ClusterSingletonManager,
  ClusterSingletonManagerSettings,
  ClusterSingletonProxy,
  ClusterSingletonProxySettings
}
import akka.http.scaladsl.model.Uri
import scala.concurrent.Await
import scala.concurrent.duration.Duration

object ChatApp {

  private final class Root extends Actor with ActorLogging {

    override val supervisorStrategy = SupervisorStrategy.stoppingStrategy

    private val userRepository = {
      val userEvents =
        Uri(context.system.settings.config.getString("gabbler-chat.user-repository.user-events"))
      val userRepository =
        context.actorOf(
          ClusterSingletonManager.props(UserRepository(userEvents),
                                        NotUsed,
                                        ClusterSingletonManagerSettings(context.system)),
          UserRepository.Name
        )
      context.actorOf(
        ClusterSingletonProxy.props(userRepository.path.elements.mkString("/", "/", ""),
                                    ClusterSingletonProxySettings(context.system)),
        s"${UserRepository.Name}-proxy"
      )
    }

    context.watch(userRepository)
    log.info("gabbler-chat up and running")

    override def receive = {
      case Terminated(actor) =>
        log.error("Terminating the system because {} terminated!", actor.path)
        context.system.terminate()
    }
  }

  def main(args: Array[String]): Unit = {
    val system = ActorSystem("gabbler-chat")
    Cluster(system).registerOnMemberUp(system.actorOf(Props(new Root), "root"))
  }
} 
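
The user-events Uri above is read from Typesafe config. A small hedged sketch of that lookup (the concrete address is an assumption):

import akka.http.scaladsl.model.Uri
import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString(
  "gabbler-chat.user-repository.user-events = \"http://gabbler-user:8000/user-events\"")

// Uri(...) parses and validates the string, so a malformed value fails fast at startup.
val userEvents = Uri(config.getString("gabbler-chat.user-repository.user-events"))
// userEvents.scheme == "http"; userEvents.path == Uri.Path("/user-events")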
Example 108
Source File: ServeSpec.scala    From typed-schema   with Apache License 2.0 5 votes vote down vote up
package ru.tinkoff.tschema.akkaHttp

import akka.http.scaladsl.model.Multipart.FormData
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.{HttpEntity, Uri}
import akka.http.scaladsl.server.MissingQueryParamRejection
import akka.http.scaladsl.testkit.ScalatestRouteTest
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport._
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import ru.tinkoff.tschema.syntax

class ServeSpec extends AnyWordSpec with Matchers with ScalatestRouteTest {
  trait Small

  import ru.tinkoff.tschema.syntax._
  val dsl = syntax

  val intAnswer = 42

  object handler {
    val int = 42

    def repeat(body: String, n: Int) = body * n

    def multiply(x: Long, y: Double) = f"result is ${x * y}%.2f"

    def size(args: List[Int]) = args.size

    def min(args: List[Int]) = args.min
  }

  def api = (keyPrefix("int") :> get :> complete[Int]) ~
    (keyPrefix("repeat") :> reqBody[String] :> queryParam[Int]("n") :> post :> complete[String]) ~
    (keyPrefix("multiply") :> formField[Long]("x") :> formField[Double]("y") :> post :> complete[String]) ~
    (keyPrefix("size") :> queryParams[Option[Int]]("args") :> post :> complete[Int]) ~
    (keyPrefix("min") :> queryParams[Int]("args") :> post :> complete[Int])

  val route = MkRoute(api)(handler)

  "Simple service" should {
    "return a simple int" in {
      Get("/int") ~> route ~> check {
        responseAs[Int] shouldEqual intAnswer
      }
    }

    "multiply string by n times" in {
      Post(Uri("/repeat").withQuery(Query("n" -> "5")), "batman") ~> route ~> check {
        responseAs[String] shouldEqual ("batman" * 5)
      }
    }

    "multiply numbers from formdata" in {
      Post(Uri("/multiply"), FormData(Map("x" -> HttpEntity("3"), "y" -> HttpEntity("1.211")))) ~>
        route ~>
        check {
          responseAs[String] shouldEqual f"result is ${3.63}%.2f"
        }
    }

    "return size of empty args" in {
      Post(Uri("/size")) ~> route ~> check {
        responseAs[Int] shouldEqual 0
      }
    }

    "return size of non empty args" in {
      Post(Uri("/size").withQuery(Query(List("1", "2", "3").map("args" -> _): _*))) ~> route ~> check {
        responseAs[Int] shouldEqual 3
      }
    }

    "return min of non empty args" in {
      Post(Uri("/min").withQuery(Query(List("3", "1", "2").map("args" -> _): _*))) ~> route ~> check {
        responseAs[Int] shouldEqual 1
      }
    }

    "reject on min with empty args" in {
      Post(Uri("/min")) ~> route ~> check {
        rejection shouldEqual MissingQueryParamRejection("args")
      }
    }
  }
} 
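
The requests in this spec are built by composing Uri with Uri.Query; a brief sketch of the two composition patterns used above (single and repeated parameters):

import akka.http.scaladsl.model.Uri
import akka.http.scaladsl.model.Uri.Query

// Single parameter: /repeat?n=5
val repeat = Uri("/repeat").withQuery(Query("n" -> "5"))

// Repeated parameter name for list-valued inputs: /min?args=3&args=1&args=2
val min = Uri("/min").withQuery(Query("args" -> "3", "args" -> "1", "args" -> "2"))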
Example 109
Source File: ExprInterpreter.scala    From iep-apps   with Apache License 2.0 5 votes vote down vote up
package com.netflix.iep.lwc.fwd.admin

import akka.http.scaladsl.model.Uri
import com.netflix.atlas.core.model.CustomVocabulary
import com.netflix.atlas.core.model.ModelExtractors
import com.netflix.atlas.core.model.StyleExpr
import com.netflix.atlas.core.stacklang.Interpreter
import com.typesafe.config.Config
import javax.inject.Inject

class ExprInterpreter @Inject()(config: Config) {

  private val interpreter = Interpreter(new CustomVocabulary(config).allWords)

  def eval(atlasUri: String): List[StyleExpr] = {
    eval(Uri(atlasUri))
  }

  def eval(uri: Uri): List[StyleExpr] = {
    val expr = uri.query().get("q").getOrElse {
      throw new IllegalArgumentException(
        s"missing required URI parameter `q`: $uri"
      )
    }

    doEval(expr)
  }

  def doEval(expr: String): List[StyleExpr] = {
    interpreter.execute(expr).stack.map {
      case ModelExtractors.PresentationType(t) => t
    }
  }

} 
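
eval relies on Uri.query() to extract the Atlas expression from the q parameter; a short sketch with an illustrative Atlas URI:

import akka.http.scaladsl.model.Uri

val uri = Uri("http://atlas/api/v1/graph?q=name,cpu,:eq,:sum") // illustrative expression
uri.query().get("q")                                           // Some("name,cpu,:eq,:sum")
Uri("http://atlas/api/v1/graph").query().get("q")              // None, so eval throws IllegalArgumentException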
Example 110
Source File: UriHelpersSpec.scala    From twitter4s   with Apache License 2.0 5 votes vote down vote up
package com.danielasfregola.twitter4s.util

import akka.http.scaladsl.model.Uri
import org.specs2.mutable.Specification

class UriHelpersSpec extends Specification with UriHelpers {

  "UriHelpers" should {

    "extract an endpoint representation" in {

      "from a uri" in {
        val uri = Uri("https://api.twitter.com/1.1/lists/members/create.json?param1=8044403&param2=daniela")
        uri.endpoint === "https://api.twitter.com/1.1/lists/members/create.json"
      }

      "from a uri with explicit port" in {
        val uri = Uri("http://example.com:8080/path?p=test")
        uri.endpoint === "http://example.com:8080/path"
      }
    }
  }
} 
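
The endpoint method itself is defined in UriHelpers (not shown here); a hypothetical sketch that would satisfy both expectations above by dropping the query and keeping an explicit port:

import akka.http.scaladsl.model.Uri

object UriSyntax {
  // Hypothetical helper, not necessarily the twitter4s implementation.
  implicit class EndpointOps(val uri: Uri) extends AnyVal {
    def endpoint: String = {
      val port = if (uri.authority.port == 0) "" else s":${uri.authority.port}"
      s"${uri.scheme}://${uri.authority.host.address}$port${uri.path}"
    }
  }
}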
Example 111
Source File: AkkaDecodeInputsContext.scala    From tapir   with Apache License 2.0 5 votes vote down vote up
package sttp.tapir.server.akkahttp

import java.util.Locale

import akka.http.scaladsl.model.headers.{`Content-Length`, `Content-Type`}
import akka.http.scaladsl.model.{HttpHeader, Uri}
import akka.http.scaladsl.server.RequestContext
import sttp.model.{Method, QueryParams}
import sttp.tapir.model.ServerRequest
import sttp.tapir.server.internal.DecodeInputsContext

private[akkahttp] class AkkaDecodeInputsContext(req: RequestContext) extends DecodeInputsContext {

  // Add low-level headers that have been removed by akka-http.
  // https://doc.akka.io/docs/akka-http/current/common/http-model.html?language=scala#http-headers
  // https://github.com/softwaremill/tapir/issues/331
  private lazy val allHeaders: List[HttpHeader] = {
    val contentLength = req.request.entity.contentLengthOption.map(`Content-Length`(_))
    val contentType = `Content-Type`(req.request.entity.contentType)
    contentType :: contentLength.toList ++ req.request.headers
  }

  override def method: Method = Method(req.request.method.value)
  override def nextPathSegment: (Option[String], DecodeInputsContext) = {
    req.unmatchedPath match {
      case Uri.Path.Slash(pathTail)      => new AkkaDecodeInputsContext(req.withUnmatchedPath(pathTail)).nextPathSegment
      case Uri.Path.Segment(s, pathTail) => (Some(s), new AkkaDecodeInputsContext(req.withUnmatchedPath(pathTail)))
      case _                             => (None, this)
    }
  }
  override def header(name: String): List[String] = {
    val nameInLowerCase = name.toLowerCase(Locale.ROOT)
    allHeaders.filter(_.is(nameInLowerCase)).map(_.value)
  }
  override def headers: Seq[(String, String)] = allHeaders.map(h => (h.name, h.value))
  override def queryParameter(name: String): Seq[String] = req.request.uri.query().getAll(name).reverse
  override def queryParameters: QueryParams = QueryParams.fromSeq(req.request.uri.query())
  override def bodyStream: Any = req.request.entity.dataBytes
  override def serverRequest: ServerRequest = new AkkaServerRequest(req)
} 
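
nextPathSegment walks Uri.Path one node at a time; a standalone sketch of the same pattern, collecting all segments at once:

import akka.http.scaladsl.model.Uri

// Recursively consume Slash and Segment nodes until the path is empty.
def segments(path: Uri.Path, acc: List[String] = Nil): List[String] =
  path match {
    case Uri.Path.Slash(tail)      => segments(tail, acc)
    case Uri.Path.Segment(s, tail) => segments(tail, acc :+ s)
    case Uri.Path.Empty            => acc
  }

// segments(Uri("/books/42/reviews").path) == List("books", "42", "reviews")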
Example 112
Source File: Utils.scala    From akka-pusher   with MIT License 5 votes vote down vote up
package com.github.dtaniwaki.akka_pusher

import java.math.BigInteger
import java.security.MessageDigest
import javax.crypto.Mac
import javax.crypto.spec.SecretKeySpec

import akka.http.scaladsl.model.Uri

object Utils {
  val HEX = 16
  val LENGTH = 32

  def byteArrayToString(data: Array[Byte]): String = {
    val bigInteger = new BigInteger(1, data)
    var hash = bigInteger.toString(HEX)

    while (hash.length() < LENGTH) {
      hash = "0" + hash
    }

    hash
  }

  def md5(string: String): String = {
    val bytesOfMessage = string.getBytes("UTF-8")
    val md = MessageDigest.getInstance("MD5")
    val digest = md.digest(bytesOfMessage)
    byteArrayToString(digest)
  }

  def sha256(secret: String, string: String): String = {
    val signingKey = new SecretKeySpec(secret.getBytes(), "HmacSHA256")

    val mac = Mac.getInstance("HmacSHA256")
    mac.init(signingKey)

    val digest = mac.doFinal(string.getBytes("UTF-8"))

    val bigInteger = new BigInteger(1, digest)
    String.format("%0" + (digest.length << 1) + "x", bigInteger)
  }

  def normalizeQuery(query: Uri.Query): Uri.Query = {
    Uri.Query(query.map { case (k, v) => (k.toString.toLowerCase, v) }.sortBy(_._1): _*)
  }
} 
Example 113
Source File: UtilsSpec.scala    From akka-pusher   with MIT License 5 votes vote down vote up
package com.github.dtaniwaki.akka_pusher

import akka.http.scaladsl.model.Uri
import org.specs2.mutable.Specification
import org.specs2.specification.process.RandomSequentialExecution

class UtilsSpec extends Specification
    with SpecHelper
    with RandomSequentialExecution {
  "#byteArrayToString" should {
    "convert a byte array to a string" in {
      Utils.byteArrayToString(Array[Byte](10, 11, 12)) === "000000000000000000000000000a0b0c"
    }
  }
  "#md5" should {
    "generate hex digest md5" in {
      Utils.md5("foo") === "acbd18db4cc2f85cedef654fccc4a4d8"
    }
  }
  "#sha256" should {
    "generate hex digest sha256" in {
      Utils.sha256("secret", "foo") === "773ba44693c7553d6ee20f61ea5d2757a9a4f4a44d2841ae4e95b52e4cd62db4"
    }
    "generate hex digest sha256 of strings with 日本語" in {
      Utils.sha256("secret", "Digest me 日本語") === "b52446253d26c4bd19c1200e310ddc8ff3678f3422b2df6c47b153209cadec0b"
      // echo -n "Digest me 日本語" | openssl dgst -sha256 -hmac "secret"
    }
  }
  "#normalizeQuery" should {
    "make the key lower case" in {
      Utils.normalizeQuery(Uri.Query(Map("abc" -> "abc", "CDE" -> "CDE"))).toString === "abc=abc&cde=CDE"
    }
    "make the order alphabetical" in {
      Utils.normalizeQuery(Uri.Query(Map("cde" -> "cde", "abc" -> "abc"))).toString === "abc=abc&cde=cde"
    }
  }
} 
Example 114
Source File: BasicAuthenticationTest.scala    From sumobot   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.sumobot.http_frontend.authentication

import akka.http.scaladsl.model.HttpMethods.{GET, POST}
import akka.http.scaladsl.model.{HttpRequest, StatusCodes, Uri}
import com.sumologic.sumobot.test.annotated.SumoBotSpec
import akka.http.scaladsl.model.headers._
import com.typesafe.config.ConfigFactory
import scala.collection.JavaConverters._

class BasicAuthenticationTest extends SumoBotSpec {
  private val authConfig = ConfigFactory.parseMap(
    Map("username" -> "admin", "password" -> "hunter2").asJava)
  val basicAuthentication = new BasicAuthentication(authConfig)
  val base64Credentials = "YWRtaW46aHVudGVyMg=="
  val base64InvalidCredentials = "YWRtaW46aHVpdGVyMg=="

  val rootRequest = HttpRequest(GET, Uri("/"))
  val authorizedRootRequest = rootRequest.withHeaders(List(`Authorization`(GenericHttpCredentials("basic", base64Credentials))))
  val invalidRootRequest = rootRequest.withHeaders(List(`Authorization`(GenericHttpCredentials("basic", base64InvalidCredentials))))

  "BasicAuthentication" should {
      "return 401 Unauthorized" when {
        "unauthenticated" in {
          val result = basicAuthentication.authentication(rootRequest)
          result match {
            case AuthenticationForbidden(response) =>
              response.status should be(StatusCodes.Unauthorized)
              response.header[`WWW-Authenticate`].nonEmpty should be(true)
            case _ =>
              fail("expected AuthenticationForbidden")
          }
        }
      }

    "successfuly authenticate" when {
      "provided correct Authorization header" in {
        val result = basicAuthentication.authentication(authorizedRootRequest)
        result match {
          case AuthenticationSucceeded(info) =>
            info.authMessage match {
              case Some(message) =>
                message should include("admin")
              case _ => fail("expected authMessage")
            }
          case _ =>
            fail("expected AuthenticationSucceeded")
        }
      }
    }

    "return 403 Forbidden" when {
      "provided incorrect Authorization header" in {
        val result = basicAuthentication.authentication(invalidRootRequest)
        result match {
          case AuthenticationForbidden(response) =>
            response.status should be(StatusCodes.Forbidden)
          case _ =>
            fail("expected AuthenticationForbidden")
        }
      }
    }
  }
} 
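
The base64Credentials constant above is simply "admin:hunter2" base64-encoded; a quick sketch of producing it, plus the typed header akka-http provides for the same purpose:

import java.util.Base64
import akka.http.scaladsl.model.headers.{Authorization, BasicHttpCredentials}

Base64.getEncoder.encodeToString("admin:hunter2".getBytes("UTF-8")) // "YWRtaW46aHVudGVyMg=="

// BasicHttpCredentials performs the encoding when the header is rendered.
Authorization(BasicHttpCredentials("admin", "hunter2")).value       // "Basic YWRtaW46aHVudGVyMg=="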
Example 115
Source File: NoAuthenticationTest.scala    From sumobot   with Apache License 2.0 5 votes vote down vote up
package com.sumologic.sumobot.http_frontend.authentication

import akka.http.scaladsl.model.HttpMethods.{GET, POST}
import akka.http.scaladsl.model.{HttpRequest, Uri}
import com.sumologic.sumobot.test.annotated.SumoBotSpec
import com.typesafe.config.ConfigFactory

class NoAuthenticationTest extends SumoBotSpec {
  val emptyRequest = HttpRequest()
  val rootRequest = HttpRequest(GET, Uri("/"))
  val postRequest = HttpRequest(POST, Uri("/endpoint"))

  val noAuthentication = new NoAuthentication(ConfigFactory.empty())

  "NoAuthentication" should {
    "allow all requests" in {
      noAuthentication.authentication(emptyRequest) shouldBe a[AuthenticationSucceeded]
      noAuthentication.authentication(rootRequest) shouldBe a[AuthenticationSucceeded]
      noAuthentication.authentication(postRequest) shouldBe a[AuthenticationSucceeded]
    }
  }
}